/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "tree-gimple.h"
#include "tree-flow.h"
#include "intl.h"
#include "params.h"
#include "tm-constrs.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif

#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))

/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;	/* first callee saved GP register used */
  int first_fp_reg_save;	/* first callee saved FP register used */
  int first_altivec_reg_save;	/* first callee saved AltiVec register used */
  int lr_save_p;		/* true if the link reg needs to be saved */
  int cr_save_p;		/* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;	/* mask of vec registers to save */
  int push_p;			/* true if we need to allocate stack space */
  int calls_p;			/* true if the function makes any calls */
  int world_save_p;		/* true if we're saving *everything*:
				   r13-r31, cr, f14-f31, vrsave, v20-v31  */
  enum rs6000_abi abi;		/* which ABI to use */
  int gp_save_offset;		/* offset to save GP regs from initial SP */
  int fp_save_offset;		/* offset to save FP regs from initial SP */
  int altivec_save_offset;	/* offset to save AltiVec regs from initial SP */
  int lr_save_offset;		/* offset to save LR from initial SP */
  int cr_save_offset;		/* offset to save CR from initial SP */
  int vrsave_save_offset;	/* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;	/* offset to save spe 64-bit gprs  */
  int varargs_save_offset;	/* offset to save the varargs registers */
  int ehrd_offset;		/* offset to EH return data */
  int reg_size;			/* register size (4 or 8) */
  HOST_WIDE_INT vars_size;	/* variable save area size */
  int parm_size;		/* outgoing parameter size */
  int save_size;		/* save area size */
  int fixed_size;		/* fixed size of stack frame */
  int gp_size;			/* size of saved GP registers */
  int fp_size;			/* size of saved FP registers */
  int altivec_size;		/* size of saved AltiVec registers */
  int cr_size;			/* size to hold CR if not in save_size */
  int vrsave_size;		/* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;	/* size of altivec alignment padding if
				   not in save_size */
  int spe_gp_size;		/* size of 64-bit GPR save size for SPE */
  int spe_padding_size;
  HOST_WIDE_INT total_size;	/* total bytes allocated for stack */
  int spe_64bit_regs_used;
} rs6000_stack_t;

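/* Note (editorial, assumed from the declarations further below): a filled-in
   rs6000_stack_t describing the current function's frame layout is computed
   by rs6000_stack_info (), and debug_stack_info () pretty-prints one when
   stack debugging is requested (typically via -mdebug=stack).  */
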
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
  /* Temporary stack slot to use for SDmode copies.  This slot is
     64-bits wide and is allocated early enough so that the offset
     does not overflow the 16-bit load/store offset field.  */
  rtx sdmode_stack_slot;
} machine_function;

/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};

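/* Note (editorial): the rs6000_select entries above are processed in
   rs6000_override_options below, where the configure-time --with-cpu=
   default is reconciled with any -mcpu= and -mtune= given on the
   command line.  */
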
static GTY(()) bool rs6000_cell_dont_microcode;

/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Align branch targets.  */
static GTY(()) bool rs6000_align_branch_targets;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double.  */
int rs6000_long_double_type_size;

/* IEEE quad extended precision long double.  */
int rs6000_ieeequad;

/* Nonzero to use AltiVec ABI.  */
int rs6000_altivec_abi;

/* Nonzero if we want SPE SIMD instructions.  */
int rs6000_spe;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Nonzero to use isel instructions.  */
int rs6000_isel;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
int rs6000_darwin64_abi;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of the label created for -mrelocatable, which is called
   to obtain the address of the GOT section.  */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which ABI to adhere to */
const char *rs6000_abi_name;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */

/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Built in types.  */

tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];

const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Cached value of rs6000_variable_issue.  This is cached in the
   rs6000_variable_issue hook and returned from rs6000_sched_reorder2.  */
static short cached_can_issue_more;

static GTY(()) section *read_only_data_section;
static GTY(()) section *private_data_section;
static GTY(()) section *read_only_private_data_section;
static GTY(()) section *sdata2_section;
static GTY(()) section *toc_section;

/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
struct {
  bool aix_struct_ret;		/* True if -maix-struct-ret was used.  */
  bool alignment;		/* True if -malign- was used.  */
  bool spe_abi;			/* True if -mabi=spe/no-spe was used.  */
  bool altivec_abi;		/* True if -mabi=altivec/no-altivec used.  */
  bool spe;			/* True if -mspe= was used.  */
  bool float_gprs;		/* True if -mfloat-gprs= was used.  */
  bool isel;			/* True if -misel was used.  */
  bool long_double;		/* True if -mlong-double- was used.  */
  bool ieee;			/* True if -mabi=ieee/ibmlongdouble used.  */
  bool vrsave;			/* True if -mvrsave was used.  */
} rs6000_explicit_options;

struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};

/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;	  /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;	  /* cost of DImode multiplication.  */
  const int divsi;	  /* cost of SImode division.  */
  const int divdi;	  /* cost of DImode division.  */
  const int fp;		  /* cost of simple SFmode and DFmode insns.  */
  const int dmul;	  /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;	  /* cost of SFmode division (fdivs).  */
  const int ddiv;	  /* cost of DFmode division (fdiv).  */
  const int cache_line_size;	/* cache line size in bytes.  */
  const int l1_cache_size;	/* size of l1 cache, in kilobytes.  */
  const int l2_cache_size;	/* size of l2 cache, in kilobytes.  */
  const int simultaneous_prefetches; /* number of parallel prefetch
					operations.  */
};

const struct processor_costs *rs6000_cost;

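/* Note (editorial): each cost entry below is expressed with COSTS_N_INSNS,
   which scales an instruction count into the units used by the RTL cost
   hooks.  rs6000_cost is pointed at the table matching the selected
   -mcpu/-mtune when options are processed and is consulted from
   rs6000_rtx_costs (declared below).  */
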
/* Processor costs (relative to an add) */

/* Instruction size costs on 32bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  32,                   /* cache line size */
  0,                    /* l1 cache */
  0,                    /* l2 cache */
  0,                    /* streams */
};

/* Instruction size costs on 64bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  128,                  /* cache line size */
  0,                    /* l1 cache */
  0,                    /* l2 cache */
  0,                    /* streams */
};

/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
  128,                  /* cache line size */
  64,                   /* l1 cache */
  512,                  /* l2 cache */
  0,                    /* streams */
};

/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  256,                  /* cache line size */
  256,                  /* l1 cache */
  1024,                 /* l2 cache */
  0,                    /* streams */
};

/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  128,                  /* cache line size */
  128,                  /* l1 cache */
  2048,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  32,                   /* cache line size */
  4,                    /* l1 cache */
  16,                   /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,                   /* cache line size */
  4,                    /* l1 cache */
  16,                   /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,                   /* cache line size */
  16,                   /* l1 cache */
  128,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,                   /* cache line size */
  8,                    /* l1 cache */
  64,                   /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,                   /* cache line size */
  16,                   /* l1 cache */
  512,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  128,                  /* cache line size */
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
  128,                  /* cache line size */
  64,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on Cell processor.  */
/* COSTS_N_INSNS (1) ~ one add.  */
static const
struct processor_costs ppccell_cost = {
  COSTS_N_INSNS (9/2)+2,    /* mulsi */
  COSTS_N_INSNS (6/2),      /* mulsi_const */
  COSTS_N_INSNS (6/2),      /* mulsi_const9 */
  COSTS_N_INSNS (15/2)+2,   /* muldi */
  COSTS_N_INSNS (38/2),     /* divsi */
  COSTS_N_INSNS (70/2),     /* divdi */
  COSTS_N_INSNS (10/2),     /* fp */
  COSTS_N_INSNS (10/2),     /* dmul */
  COSTS_N_INSNS (74/2),     /* sdiv */
  COSTS_N_INSNS (74/2),     /* ddiv */
  128,                      /* cache line size */
  32,                       /* l1 cache */
  512,                      /* l2 cache */
  6,                        /* streams */
};

/* Instruction costs on PPC750 and PPC7400 processors.  */
static const
struct processor_costs ppc750_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (17),   /* divsi */
  COSTS_N_INSNS (17),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  512,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC7450 processors.  */
static const
struct processor_costs ppc7450_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (23),   /* divsi */
  COSTS_N_INSNS (23),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (21),   /* sdiv */
  COSTS_N_INSNS (35),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC8540 processors.  */
static const
struct processor_costs ppc8540_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (29),   /* sdiv */
  COSTS_N_INSNS (29),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* prefetch streams */
};

/* Instruction costs on E300C2 and E300C3 cores.  */
static const
struct processor_costs ppce300c2c3_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,                   /* cache line size */
  16,                   /* l1 cache */
  16,                   /* l2 cache */
  1,                    /* prefetch streams */
};

/* Instruction costs on PPCE500MC processors.  */
static const
struct processor_costs ppce500mc_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (14),   /* divsi */
  COSTS_N_INSNS (14),   /* divdi */
  COSTS_N_INSNS (8),    /* fp */
  COSTS_N_INSNS (10),   /* dmul */
  COSTS_N_INSNS (36),   /* sdiv */
  COSTS_N_INSNS (66),   /* ddiv */
  64,                   /* cache line size */
  32,                   /* l1 cache */
  128,                  /* l2 cache */
  1,                    /* prefetch streams */
};

/* Instruction costs on POWER4 and POWER5 processors.  */
static const
struct processor_costs power4_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (18),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  128,                  /* cache line size */
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  8,                    /* prefetch streams */
};

/* Instruction costs on POWER6 processors.  */
static const
struct processor_costs power6_cost = {
  COSTS_N_INSNS (8),    /* mulsi */
  COSTS_N_INSNS (8),    /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (8),    /* muldi */
  COSTS_N_INSNS (22),   /* divsi */
  COSTS_N_INSNS (28),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (13),   /* sdiv */
  COSTS_N_INSNS (16),   /* ddiv */
  128,                  /* cache line size */
  64,                   /* l1 cache */
  2048,                 /* l2 cache */
  16,                   /* prefetch streams */
};

static bool rs6000_function_ok_for_sibcall (tree, tree);
static const char *rs6000_invalid_within_doloop (const_rtx);
static rtx rs6000_generate_compare (enum rtx_code);
static void rs6000_emit_stack_tie (void);
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
static bool spe_func_has_64bit_regs_p (void);
static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
			     int, HOST_WIDE_INT);
static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int, int);
static unsigned rs6000_hash_constant (rtx);
static unsigned toc_hash_function (const void *);
static int toc_hash_eq (const void *, const void *);
static int constant_pool_expr_1 (rtx, int *, int *);
static bool constant_pool_expr_p (rtx);
static bool legitimate_small_data_p (enum machine_mode, rtx);
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
static struct machine_function * rs6000_init_machine_status (void);
static bool rs6000_assemble_integer (rtx, unsigned int, int);
static bool no_global_regs_above (int, bool);
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
static int rs6000_ra_ever_killed (void);
static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
static bool rs6000_ms_bitfield_layout_p (const_tree);
static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
static const char *rs6000_mangle_type (const_tree);
extern const struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes (tree);
static rtx rs6000_savres_routine_sym (rs6000_stack_t *, bool, bool, bool);
static void rs6000_emit_stack_reset (rs6000_stack_t *, rtx, rtx, int, bool);
static rtx rs6000_make_savres_rtx (rs6000_stack_t *, rtx, int,
				   enum machine_mode, bool, bool, bool);
static bool rs6000_reg_live_or_pic_offset_p (int);
static int rs6000_savres_strategy (rs6000_stack_t *, bool, int, int);
static void rs6000_restore_saved_cr (rtx, int);
static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
				    tree);
static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
static bool rs6000_return_in_memory (const_tree, const_tree);
static void rs6000_file_start (void);
#if TARGET_ELF
static int rs6000_elf_reloc_rw_mask (void);
static void rs6000_elf_asm_out_constructor (rtx, int);
static void rs6000_elf_asm_out_destructor (rtx, int);
static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
static void rs6000_elf_asm_init_sections (void);
static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
					       unsigned HOST_WIDE_INT);
static void rs6000_elf_encode_section_info (tree, rtx, int)
     ATTRIBUTE_UNUSED;
#endif
static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
static void rs6000_alloc_sdmode_stack_slot (void);
static void rs6000_instantiate_decls (void);
#if TARGET_XCOFF
static void rs6000_xcoff_asm_output_anchor (rtx);
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
static void rs6000_xcoff_asm_init_sections (void);
static int rs6000_xcoff_reloc_rw_mask (void);
static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
static section *rs6000_xcoff_select_section (tree, int,
					     unsigned HOST_WIDE_INT);
static void rs6000_xcoff_unique_section (tree, int);
static section *rs6000_xcoff_select_rtx_section
  (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
static const char * rs6000_xcoff_strip_name_encoding (const char *);
static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
static void rs6000_xcoff_file_start (void);
static void rs6000_xcoff_file_end (void);
#endif
static int rs6000_variable_issue (FILE *, int, rtx, int);
static bool rs6000_rtx_costs (rtx, int, int, int *);
static int rs6000_adjust_cost (rtx, rtx, rtx, int);
static void rs6000_sched_init (FILE *, int, int);
static bool is_microcoded_insn (rtx);
static bool is_nonpipeline_insn (rtx);
static bool is_cracked_insn (rtx);
static bool is_branch_slot_insn (rtx);
static bool is_load_insn (rtx);
static rtx get_store_dest (rtx pat);
static bool is_store_insn (rtx);
static bool set_to_load_agen (rtx,rtx);
static bool adjacent_mem_locations (rtx,rtx);
static int rs6000_adjust_priority (rtx, int);
static int rs6000_issue_rate (void);
static bool rs6000_is_costly_dependence (dep_t, int, int);
static rtx get_next_active_insn (rtx, rtx);
static bool insn_terminates_group_p (rtx , enum group_termination);
static bool insn_must_be_first_in_group (rtx);
static bool insn_must_be_last_in_group (rtx);
static bool is_costly_group (rtx *, rtx);
static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
static int redefine_groups (FILE *, int, rtx, rtx);
static int pad_groups (FILE *, int, rtx, rtx);
static void rs6000_sched_finish (FILE *, int);
static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
static int rs6000_use_sched_lookahead (void);
static int rs6000_use_sched_lookahead_guard (rtx);
static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
static tree rs6000_builtin_mask_for_load (void);
static tree rs6000_builtin_mul_widen_even (tree);
static tree rs6000_builtin_mul_widen_odd (tree);
static tree rs6000_builtin_conversion (enum tree_code, tree);

static void def_builtin (int, const char *, tree, int);
static bool rs6000_vector_alignment_reachable (const_tree, bool);
static void rs6000_init_builtins (void);
static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void altivec_init_builtins (void);
static void rs6000_common_init_builtins (void);
static void rs6000_init_libfuncs (void);

static void paired_init_builtins (void);
static rtx paired_expand_builtin (tree, rtx, bool *);
static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx paired_expand_stv_builtin (enum insn_code, tree);
static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);

static void enable_mask_for_builtins (struct builtin_description *, int,
				      enum rs6000_builtins,
				      enum rs6000_builtins);
static tree build_opaque_vector_type (tree, int);
static void spe_init_builtins (void);
static rtx spe_expand_builtin (tree, rtx, bool *);
static rtx spe_expand_stv_builtin (enum insn_code, tree);
static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
static rs6000_stack_t *rs6000_stack_info (void);
static void debug_stack_info (rs6000_stack_t *);

static rtx altivec_expand_builtin (tree, rtx, bool *);
static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
static rtx altivec_expand_st_builtin (tree, rtx, bool *);
static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_predicate_builtin (enum insn_code,
					     const char *, tree, rtx);
static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_stv_builtin (enum insn_code, tree);
static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
static rtx altivec_expand_vec_set_builtin (tree);
static rtx altivec_expand_vec_ext_builtin (tree, rtx);
static int get_element_number (tree, tree);
static bool rs6000_handle_option (size_t, const char *, int);
static void rs6000_parse_tls_size_option (void);
static void rs6000_parse_yes_no_option (const char *, const char *, int *);
static int first_altivec_reg_to_save (void);
static unsigned int compute_vrsave_mask (void);
static void compute_save_world_info (rs6000_stack_t *info_ptr);
static void is_altivec_return_reg (rtx, void *);
static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
int easy_vector_constant (rtx, enum machine_mode);
static bool rs6000_is_opaque_type (const_tree);
static rtx rs6000_dwarf_register_span (rtx);
static void rs6000_init_dwarf_reg_sizes_extra (tree);
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static rtx rs6000_tls_get_addr (void);
static rtx rs6000_got_sym (void);
static int rs6000_tls_symbol_ref_1 (rtx *, void *);
static const char *rs6000_get_some_local_dynamic_name (void);
static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
static rtx rs6000_complex_function_value (enum machine_mode);
static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
				    enum machine_mode, tree);
static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
						      HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
							tree, HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
					      HOST_WIDE_INT,
					      rtx[], int *);
static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
						const_tree, HOST_WIDE_INT,
						rtx[], int *);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
static void setup_incoming_varargs (CUMULATIVE_ARGS *,
				    enum machine_mode, tree,
				    int *, int);
static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
				      const_tree, bool);
static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
				     tree, bool);
static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
#if TARGET_MACHO
static void macho_branch_islands (void);
static int no_previous_def (tree function_name);
static tree get_prev_label (tree function_name);
static void rs6000_darwin_file_start (void);
#endif

static tree rs6000_build_builtin_va_list (void);
static void rs6000_va_start (tree, rtx);
static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
static bool rs6000_scalar_mode_supported_p (enum machine_mode);
static bool rs6000_vector_mode_supported_p (enum machine_mode);
static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
			     enum machine_mode);
static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
				       enum machine_mode);
static int get_vsel_insn (enum machine_mode);
static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
static tree rs6000_stack_protect_fail (void);

const int INSN_NOT_AVAILABLE = -1;
static enum machine_mode rs6000_eh_return_filter_mode (void);

/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
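
/* Note (editorial): entries are interned into toc_hash_table using
   toc_hash_function and toc_hash_eq (declared above), the intent being
   that identical (constant, mode) pairs share a single TOC entry and
   label number.  */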

/* Default register names.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr",
      /* Soft frame pointer.  */
      "sfp"
};

#ifdef TARGET_REGNAMES
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};
#endif
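
/* Note (editorial, assumed behaviour): when the port is built with
   TARGET_REGNAMES and -mregnames is in effect, the option-override code
   is expected to switch output to the symbolic alt_reg_names spellings
   (%rN, %fN, %vN) in place of the bare numbers above.  */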

#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
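/* For example, ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO) is 0x80000000 (%v0)
   and ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 31) is 0x00000001 (%v31),
   matching the VRSAVE bit layout described above.  */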

/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT rs6000_sched_init
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER rs6000_sched_reorder
#undef TARGET_SCHED_REORDER2
#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard

#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion

#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE rs6000_mangle_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
#endif

#undef TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

#undef TARGET_VECTOR_OPAQUE_P
#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra

/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START rs6000_va_start

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION rs6000_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT)

#undef TARGET_STACK_PROTECT_FAIL
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail

/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
   The PowerPC architecture requires only weak consistency among
   processors--that is, memory accesses between processors need not be
   sequentially consistent and memory accesses among processors can occur
   in any order.  The ability to order memory accesses weakly provides
   opportunities for more efficient use of the system bus.  Unless a
   dependency exists, the 604e allows read operations to precede store
   operations.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif

/* Use a 32-bit anchor range.  This leads to sequences like:

	addis	tmp,anchor,high
	add	dest,tmp,low

   where tmp itself acts as an anchor, and can be shared between
   accesses to the same 64k page.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p

#undef TARGET_BUILTIN_RECIPROCAL
#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal

#undef TARGET_EXPAND_TO_RTL_HOOK
#define TARGET_EXPAND_TO_RTL_HOOK rs6000_alloc_sdmode_stack_slot

#undef TARGET_INSTANTIATE_DECLS
#define TARGET_INSTANTIATE_DECLS rs6000_instantiate_decls

struct gcc_target targetm = TARGET_INITIALIZER;

/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.
     This excludes the 32-bit decimal float mode for now.  */
  if (FP_REGNO_P (regno))
    return
      ((SCALAR_FLOAT_MODE_P (mode)
       && (mode != TDmode || (regno % 2) == 0)
       && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
      || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
	  && PAIRED_VECTOR_MODE (mode)));

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general register and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}

/* Initialize rs6000_hard_regno_mode_ok_p table.  */
static void
rs6000_init_hard_regno_mode_ok (void)
{
  int r, m;

  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
    for (m = 0; m < NUM_MACHINE_MODES; ++m)
      if (rs6000_hard_regno_mode_ok (r, m))
	rs6000_hard_regno_mode_ok_p[m][r] = true;
}

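/* Note (editorial, assumed): the table built above is what the
   HARD_REGNO_MODE_OK target macro in rs6000.h consults (roughly
   rs6000_hard_regno_mode_ok_p[(int) MODE][REGNO]), so the register/mode
   legality check is computed once up front rather than on every query.  */
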
e4cad568
GK
1346#if TARGET_MACHO
1347/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1348
1349static void
1350darwin_rs6000_override_options (void)
1351{
1352 /* The Darwin ABI always includes AltiVec, can't be (validly) turned
1353 off. */
1354 rs6000_altivec_abi = 1;
1355 TARGET_ALTIVEC_VRSAVE = 1;
1356 if (DEFAULT_ABI == ABI_DARWIN)
1357 {
1358 if (MACHO_DYNAMIC_NO_PIC_P)
1359 {
1360 if (flag_pic)
1361 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1362 flag_pic = 0;
1363 }
1364 else if (flag_pic == 1)
1365 {
1366 flag_pic = 2;
1367 }
1368 }
1369 if (TARGET_64BIT && ! TARGET_POWERPC64)
1370 {
1371 target_flags |= MASK_POWERPC64;
1372 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1373 }
1374 if (flag_mkernel)
1375 {
1376 rs6000_default_long_calls = 1;
1377 target_flags |= MASK_SOFT_FLOAT;
1378 }
1379
1380 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
1381 Altivec. */
1382 if (!flag_mkernel && !flag_apple_kext
1383 && TARGET_64BIT
1384 && ! (target_flags_explicit & MASK_ALTIVEC))
1385 target_flags |= MASK_ALTIVEC;
1386
1387 /* Unless the user (not the configurer) has explicitly overridden
1388 it with -mcpu=G3 or -mno-altivec, 10.5+ targets default to
1389 G4 unless targeting the kernel. */
1390 if (!flag_mkernel
1391 && !flag_apple_kext
1392 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1393 && ! (target_flags_explicit & MASK_ALTIVEC)
1394 && ! rs6000_select[1].string)
1395 {
1396 target_flags |= MASK_ALTIVEC;
1397 }
1398}
1399#endif
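/* Worked example of the Darwin overrides above (illustrative only, no new
   behaviour): on a 10.5-or-later minimum-version target with no -mkernel,
   -fapple-kext, -mcpu or explicit -maltivec/-mno-altivec, the final test
   ORs MASK_ALTIVEC into target_flags, i.e. the compiler behaves as if
   -maltivec had been given; with -mkernel the earlier test instead forces
   long calls and MASK_SOFT_FLOAT and leaves AltiVec untouched.  */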
1400
c1e55850
GK
1401/* If not otherwise specified by a target, make 'long double' equivalent to
1402 'double'. */
1403
1404#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1405#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1406#endif
1407
5248c961
RK
1408/* Override command line options. Mostly we process the processor
1409 type and sometimes adjust other TARGET_ options. */
1410
1411void
d779d0dc 1412rs6000_override_options (const char *default_cpu)
5248c961 1413{
c4d38ccb 1414 size_t i, j;
8e3f41e7 1415 struct rs6000_cpu_select *ptr;
66188a7e 1416 int set_masks;
5248c961 1417
66188a7e 1418 /* Simplifications for entries below. */
85638c0d 1419
66188a7e
GK
1420 enum {
1421 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1422 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1423 };
85638c0d 1424
66188a7e
GK
1425 /* This table occasionally claims that a processor does not support
1426 a particular feature even though it does, but the feature is slower
1427 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1428 complete description of the processor's support.
66188a7e
GK
1429
1430 Please keep this list in order, and don't forget to update the
1431 documentation in invoke.texi when adding a new processor or
1432 flag. */
5248c961
RK
1433 static struct ptt
1434 {
8b60264b
KG
1435 const char *const name; /* Canonical processor name. */
1436 const enum processor_type processor; /* Processor type enum value. */
1437 const int target_enable; /* Target flags to enable. */
8b60264b 1438 } const processor_target_table[]
66188a7e 1439 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1440 {"403", PROCESSOR_PPC403,
66188a7e 1441 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1442 {"405", PROCESSOR_PPC405,
716019c0
JM
1443 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1444 {"405fp", PROCESSOR_PPC405,
1445 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1446 {"440", PROCESSOR_PPC440,
716019c0
JM
1447 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1448 {"440fp", PROCESSOR_PPC440,
1449 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
4adf8008
PB
1450 {"464", PROCESSOR_PPC440,
1451 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1452 {"464fp", PROCESSOR_PPC440,
1453 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1454 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1455 {"601", PROCESSOR_PPC601,
66188a7e
GK
1456 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1457 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1458 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1459 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1460 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1461 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1462 {"620", PROCESSOR_PPC620,
1463 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1464 {"630", PROCESSOR_PPC630,
1465 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1466 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1467 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1468 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1469 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1470 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1471 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1472 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1473 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1474 /* 8548 has a dummy entry for now. */
a45bce6e 1475 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
fa41c305
EW
1476 {"e300c2", PROCESSOR_PPCE300C2, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1477 {"e300c3", PROCESSOR_PPCE300C3, POWERPC_BASE_MASK},
edae5fe3 1478 {"e500mc", PROCESSOR_PPCE500MC, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
66188a7e 1479 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1480 {"970", PROCESSOR_POWER4,
66188a7e 1481 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1482 {"cell", PROCESSOR_CELL,
1483 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1484 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1485 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1486 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1487 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1488 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1489 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1490 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1491 {"power2", PROCESSOR_POWER,
1492 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1493 {"power3", PROCESSOR_PPC630,
1494 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1495 {"power4", PROCESSOR_POWER4,
fc091c8e 1496 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
ec507f2d 1497 {"power5", PROCESSOR_POWER5,
432218ba
DE
1498 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1499 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7
DE
1500 {"power5+", PROCESSOR_POWER5,
1501 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1502 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1503 {"power6", PROCESSOR_POWER6,
e118597e 1504 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1505 | MASK_FPRND | MASK_CMPB | MASK_DFP },
44cd321e
PS
1506 {"power6x", PROCESSOR_POWER6,
1507 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1508 | MASK_FPRND | MASK_CMPB | MASK_MFPGPR | MASK_DFP },
66188a7e
GK
1509 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1510 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1511 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1512 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1513 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1514 {"rios2", PROCESSOR_RIOS2,
1515 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1516 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1517 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1518 {"rs64", PROCESSOR_RS64A,
1519 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1520 };
5248c961 1521
ca7558fc 1522 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1523
66188a7e
GK
1524 /* Some OSs don't support saving the high part of 64-bit registers on
1525 context switch. Other OSs don't support saving Altivec registers.
1526 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1527 settings; if the user wants either, the user must explicitly specify
1528 them and we won't interfere with the user's specification. */
1529
1530 enum {
1531 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1532 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1533 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1534 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1535 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1536 };
0d1fbc8c
AH
1537
1538 rs6000_init_hard_regno_mode_ok ();
1539
c4ad648e 1540 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1541#ifdef OS_MISSING_POWERPC64
1542 if (OS_MISSING_POWERPC64)
1543 set_masks &= ~MASK_POWERPC64;
1544#endif
1545#ifdef OS_MISSING_ALTIVEC
1546 if (OS_MISSING_ALTIVEC)
1547 set_masks &= ~MASK_ALTIVEC;
1548#endif
1549
768875a8
AM
1550 /* Don't let the processor default override flags given explicitly. */
1551 set_masks &= ~target_flags_explicit;
957211c3 1552
a4f6c312 1553 /* Identify the processor type. */
8e3f41e7 1554 rs6000_select[0].string = default_cpu;
3cb999d8 1555 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1556
b6a1cbae 1557 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1558 {
8e3f41e7
MM
1559 ptr = &rs6000_select[i];
1560 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1561 {
8e3f41e7
MM
1562 for (j = 0; j < ptt_size; j++)
1563 if (! strcmp (ptr->string, processor_target_table[j].name))
1564 {
1565 if (ptr->set_tune_p)
1566 rs6000_cpu = processor_target_table[j].processor;
1567
1568 if (ptr->set_arch_p)
1569 {
66188a7e
GK
1570 target_flags &= ~set_masks;
1571 target_flags |= (processor_target_table[j].target_enable
1572 & set_masks);
8e3f41e7
MM
1573 }
1574 break;
1575 }
1576
4406229e 1577 if (j == ptt_size)
8e3f41e7 1578 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1579 }
1580 }
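/* An illustrative trace of the lookup above (no new logic): with
   "-mcpu=power5 -mtune=power6", and leaving aside the configure-time
   default in rs6000_select[0], the "power5" entry first sets rs6000_cpu to
   PROCESSOR_POWER5 and merges POWERPC_BASE_MASK | MASK_POWERPC64
   | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POPCNTB into target_flags,
   restricted by set_masks so explicit user flags and OS-missing features
   are respected; the "power6" entry is a tune-only selection, so it only
   changes rs6000_cpu to PROCESSOR_POWER6.  */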
8a61d227 1581
edae5fe3
DE
1582 if ((TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
1583 && !rs6000_explicit_options.isel)
a3170dc6
AH
1584 rs6000_isel = 1;
1585
edae5fe3
DE
1586 if (rs6000_cpu == PROCESSOR_PPCE300C2 || rs6000_cpu == PROCESSOR_PPCE300C3
1587 || rs6000_cpu == PROCESSOR_PPCE500MC)
fa41c305
EW
1588 {
1589 if (TARGET_ALTIVEC)
1590 error ("AltiVec not supported in this target");
1591 if (TARGET_SPE)
1592 error ("Spe not supported in this target");
1593 }
1594
dff9f1b6
DE
1595 /* If we are optimizing big endian systems for space, use the load/store
1596 multiple and string instructions. */
ef792183 1597 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1598 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1599
a4f6c312
SS
1600 /* Don't allow -mmultiple or -mstring on little endian systems
1601 unless the cpu is a 750, because the hardware doesn't support the
1602 instructions used in little endian mode and they cause an alignment
1603 trap. The 750 does not cause an alignment trap (except when the
1604 target is unaligned). */
bef84347 1605
b21fb038 1606 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1607 {
1608 if (TARGET_MULTIPLE)
1609 {
1610 target_flags &= ~MASK_MULTIPLE;
b21fb038 1611 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1612 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1613 }
1614
1615 if (TARGET_STRING)
1616 {
1617 target_flags &= ~MASK_STRING;
b21fb038 1618 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1619 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1620 }
1621 }
3933e0e1 1622
38c1f2d7
MM
1623 /* Set debug flags */
1624 if (rs6000_debug_name)
1625 {
bfc79d3b 1626 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1627 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1628 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1629 rs6000_debug_stack = 1;
bfc79d3b 1630 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1631 rs6000_debug_arg = 1;
1632 else
c725bd79 1633 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1634 }
1635
57ac7be9
AM
1636 if (rs6000_traceback_name)
1637 {
1638 if (! strncmp (rs6000_traceback_name, "full", 4))
1639 rs6000_traceback = traceback_full;
1640 else if (! strncmp (rs6000_traceback_name, "part", 4))
1641 rs6000_traceback = traceback_part;
1642 else if (! strncmp (rs6000_traceback_name, "no", 2))
1643 rs6000_traceback = traceback_none;
1644 else
9e637a26 1645 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1646 rs6000_traceback_name);
1647 }
1648
78f5898b
AH
1649 if (!rs6000_explicit_options.long_double)
1650 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1651
602ea4d3 1652#ifndef POWERPC_LINUX
d3603e8c 1653 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1654 rs6000_ieeequad = 1;
1655#endif
1656
0db747be
DE
1657 /* Enable Altivec ABI for AIX -maltivec. */
1658 if (TARGET_XCOFF && TARGET_ALTIVEC)
1659 rs6000_altivec_abi = 1;
1660
a2db2771
JJ
1661 /* The AltiVec ABI is the default for PowerPC-64 GNU/Linux. For
1662 PowerPC-32 GNU/Linux, -maltivec implies the AltiVec ABI. It can
1663 be explicitly overridden in either case. */
1664 if (TARGET_ELF)
6d0ef01e 1665 {
a2db2771
JJ
1666 if (!rs6000_explicit_options.altivec_abi
1667 && (TARGET_64BIT || TARGET_ALTIVEC))
1668 rs6000_altivec_abi = 1;
1669
1670 /* Enable VRSAVE for AltiVec ABI, unless explicitly overridden. */
1671 if (!rs6000_explicit_options.vrsave)
1672 TARGET_ALTIVEC_VRSAVE = rs6000_altivec_abi;
6d0ef01e
HP
1673 }
1674
594a51fe
SS
1675 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1676 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1677 {
1678 rs6000_darwin64_abi = 1;
9c7956fd 1679#if TARGET_MACHO
6ac49599 1680 darwin_one_byte_bool = 1;
9c7956fd 1681#endif
d9168963
SS
1682 /* Default to natural alignment, for better performance. */
1683 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1684 }
1685
194c524a
DE
1686 /* Place FP constants in the constant pool instead of TOC
1687 if section anchors are enabled. */
1688 if (flag_section_anchors)
1689 TARGET_NO_FP_IN_TOC = 1;
1690
c4501e62
JJ
1691 /* Handle -mtls-size option. */
1692 rs6000_parse_tls_size_option ();
1693
a7ae18e2
AH
1694#ifdef SUBTARGET_OVERRIDE_OPTIONS
1695 SUBTARGET_OVERRIDE_OPTIONS;
1696#endif
1697#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1698 SUBSUBTARGET_OVERRIDE_OPTIONS;
1699#endif
4d4cbc0e
AH
1700#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1701 SUB3TARGET_OVERRIDE_OPTIONS;
1702#endif
a7ae18e2 1703
edae5fe3 1704 if (TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
5da702b1 1705 {
edae5fe3 1706 /* The e500 and e500mc do not have string instructions, and we set
5da702b1
AH
1707 MASK_STRING above when optimizing for size. */
1708 if ((target_flags & MASK_STRING) != 0)
1709 target_flags = target_flags & ~MASK_STRING;
1710 }
1711 else if (rs6000_select[1].string != NULL)
1712 {
1713 /* For the powerpc-eabispe configuration, we set all these by
1714 default, so let's unset them if we manually set another
1715 CPU that is not the E500. */
a2db2771 1716 if (!rs6000_explicit_options.spe_abi)
5da702b1 1717 rs6000_spe_abi = 0;
78f5898b 1718 if (!rs6000_explicit_options.spe)
5da702b1 1719 rs6000_spe = 0;
78f5898b 1720 if (!rs6000_explicit_options.float_gprs)
5da702b1 1721 rs6000_float_gprs = 0;
78f5898b 1722 if (!rs6000_explicit_options.isel)
5da702b1
AH
1723 rs6000_isel = 0;
1724 }
b5044283 1725
eca0d5e8
JM
1726 /* Detect invalid option combinations with E500. */
1727 CHECK_E500_OPTIONS;
1728
ec507f2d 1729 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1730 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1731 && rs6000_cpu != PROCESSOR_POWER6
1732 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1733 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1734 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1735 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1736 || rs6000_cpu == PROCESSOR_POWER5
1737 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1738
ec507f2d
DE
1739 rs6000_sched_restricted_insns_priority
1740 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1741
569fa502 1742 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1743 rs6000_sched_costly_dep
1744 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1745
569fa502
DN
1746 if (rs6000_sched_costly_dep_str)
1747 {
f676971a 1748 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1749 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1750 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1751 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1752 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1753 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1754 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1755 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1756 else
c4ad648e 1757 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1758 }
1759
1760 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1761 rs6000_sched_insert_nops
1762 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1763
cbe26ab8
DN
1764 if (rs6000_sched_insert_nops_str)
1765 {
1766 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1767 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1768 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1769 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1770 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1771 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1772 else
c4ad648e 1773 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1774 }
1775
c81bebd7 1776#ifdef TARGET_REGNAMES
a4f6c312
SS
1777 /* If the user desires alternate register names, copy in the
1778 alternate names now. */
c81bebd7 1779 if (TARGET_REGNAMES)
4e135bdd 1780 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1781#endif
1782
df01da37 1783 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1784 If -maix-struct-return or -msvr4-struct-return was explicitly
1785 used, don't override with the ABI default. */
df01da37
DE
1786 if (!rs6000_explicit_options.aix_struct_ret)
1787 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1788
602ea4d3 1789 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1790 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1791
f676971a 1792 if (TARGET_TOC)
9ebbca7d 1793 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1794
301d03af
RS
1795 /* We can only guarantee the availability of DI pseudo-ops when
1796 assembling for 64-bit targets. */
ae6c1efd 1797 if (!TARGET_64BIT)
301d03af
RS
1798 {
1799 targetm.asm_out.aligned_op.di = NULL;
1800 targetm.asm_out.unaligned_op.di = NULL;
1801 }
1802
1494c534
DE
1803 /* Set branch target alignment, if not optimizing for size. */
1804 if (!optimize_size)
1805 {
d296e02e
AP
1806 /* Cell wants to be 8-byte aligned for dual issue. */
1807 if (rs6000_cpu == PROCESSOR_CELL)
1808 {
1809 if (align_functions <= 0)
1810 align_functions = 8;
1811 if (align_jumps <= 0)
1812 align_jumps = 8;
1813 if (align_loops <= 0)
1814 align_loops = 8;
1815 }
44cd321e 1816 if (rs6000_align_branch_targets)
1494c534
DE
1817 {
1818 if (align_functions <= 0)
1819 align_functions = 16;
1820 if (align_jumps <= 0)
1821 align_jumps = 16;
1822 if (align_loops <= 0)
1823 align_loops = 16;
1824 }
1825 if (align_jumps_max_skip <= 0)
1826 align_jumps_max_skip = 15;
1827 if (align_loops_max_skip <= 0)
1828 align_loops_max_skip = 15;
1829 }
2792d578 1830
71f123ca
FS
1831 /* Arrange to save and restore machine status around nested functions. */
1832 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1833
1834 /* We should always be splitting complex arguments, but we can't break
1835 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1836 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1837 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1838
1839 /* Initialize rs6000_cost with the appropriate target costs. */
1840 if (optimize_size)
1841 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1842 else
1843 switch (rs6000_cpu)
1844 {
1845 case PROCESSOR_RIOS1:
1846 rs6000_cost = &rios1_cost;
1847 break;
1848
1849 case PROCESSOR_RIOS2:
1850 rs6000_cost = &rios2_cost;
1851 break;
1852
1853 case PROCESSOR_RS64A:
1854 rs6000_cost = &rs64a_cost;
1855 break;
1856
1857 case PROCESSOR_MPCCORE:
1858 rs6000_cost = &mpccore_cost;
1859 break;
1860
1861 case PROCESSOR_PPC403:
1862 rs6000_cost = &ppc403_cost;
1863 break;
1864
1865 case PROCESSOR_PPC405:
1866 rs6000_cost = &ppc405_cost;
1867 break;
1868
1869 case PROCESSOR_PPC440:
1870 rs6000_cost = &ppc440_cost;
1871 break;
1872
1873 case PROCESSOR_PPC601:
1874 rs6000_cost = &ppc601_cost;
1875 break;
1876
1877 case PROCESSOR_PPC603:
1878 rs6000_cost = &ppc603_cost;
1879 break;
1880
1881 case PROCESSOR_PPC604:
1882 rs6000_cost = &ppc604_cost;
1883 break;
1884
1885 case PROCESSOR_PPC604e:
1886 rs6000_cost = &ppc604e_cost;
1887 break;
1888
1889 case PROCESSOR_PPC620:
8b897cfa
RS
1890 rs6000_cost = &ppc620_cost;
1891 break;
1892
f0517163
RS
1893 case PROCESSOR_PPC630:
1894 rs6000_cost = &ppc630_cost;
1895 break;
1896
982afe02 1897 case PROCESSOR_CELL:
d296e02e
AP
1898 rs6000_cost = &ppccell_cost;
1899 break;
1900
8b897cfa
RS
1901 case PROCESSOR_PPC750:
1902 case PROCESSOR_PPC7400:
1903 rs6000_cost = &ppc750_cost;
1904 break;
1905
1906 case PROCESSOR_PPC7450:
1907 rs6000_cost = &ppc7450_cost;
1908 break;
1909
1910 case PROCESSOR_PPC8540:
1911 rs6000_cost = &ppc8540_cost;
1912 break;
1913
fa41c305
EW
1914 case PROCESSOR_PPCE300C2:
1915 case PROCESSOR_PPCE300C3:
1916 rs6000_cost = &ppce300c2c3_cost;
1917 break;
1918
edae5fe3
DE
1919 case PROCESSOR_PPCE500MC:
1920 rs6000_cost = &ppce500mc_cost;
1921 break;
1922
8b897cfa
RS
1923 case PROCESSOR_POWER4:
1924 case PROCESSOR_POWER5:
1925 rs6000_cost = &power4_cost;
1926 break;
1927
44cd321e
PS
1928 case PROCESSOR_POWER6:
1929 rs6000_cost = &power6_cost;
1930 break;
1931
8b897cfa 1932 default:
37409796 1933 gcc_unreachable ();
8b897cfa 1934 }
0b11da67
DE
1935
1936 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1937 set_param_value ("simultaneous-prefetches",
1938 rs6000_cost->simultaneous_prefetches);
1939 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1940 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1941 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1942 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1943 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1944 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
d7bd8aeb
JJ
1945
1946 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
1947 can be optimized to ap = __builtin_next_arg (0). */
1948 if (DEFAULT_ABI != ABI_V4)
1949 targetm.expand_builtin_va_start = NULL;
5248c961 1950}
5accd822 1951
7ccf35ed
DN
1952/* Implement targetm.vectorize.builtin_mask_for_load. */
1953static tree
1954rs6000_builtin_mask_for_load (void)
1955{
1956 if (TARGET_ALTIVEC)
1957 return altivec_builtin_mask_for_load;
1958 else
1959 return 0;
1960}
1961
f57d17f1
TM
1962/* Implement targetm.vectorize.builtin_conversion. */
1963static tree
1964rs6000_builtin_conversion (enum tree_code code, tree type)
1965{
1966 if (!TARGET_ALTIVEC)
1967 return NULL_TREE;
982afe02 1968
f57d17f1
TM
1969 switch (code)
1970 {
1971 case FLOAT_EXPR:
1972 switch (TYPE_MODE (type))
1973 {
1974 case V4SImode:
982afe02 1975 return TYPE_UNSIGNED (type) ?
f57d17f1
TM
1976 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX] :
1977 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
1978 default:
1979 return NULL_TREE;
1980 }
1981 default:
1982 return NULL_TREE;
1983 }
1984}
1985
89d67cca
DN
1986/* Implement targetm.vectorize.builtin_mul_widen_even. */
1987static tree
1988rs6000_builtin_mul_widen_even (tree type)
1989{
1990 if (!TARGET_ALTIVEC)
1991 return NULL_TREE;
1992
1993 switch (TYPE_MODE (type))
1994 {
1995 case V8HImode:
982afe02 1996 return TYPE_UNSIGNED (type) ?
89d67cca
DN
1997 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH] :
1998 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
1999
2000 case V16QImode:
2001 return TYPE_UNSIGNED (type) ?
2002 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB] :
2003 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
2004 default:
2005 return NULL_TREE;
2006 }
2007}
2008
2009/* Implement targetm.vectorize.builtin_mul_widen_odd. */
2010static tree
2011rs6000_builtin_mul_widen_odd (tree type)
2012{
2013 if (!TARGET_ALTIVEC)
2014 return NULL_TREE;
2015
2016 switch (TYPE_MODE (type))
2017 {
2018 case V8HImode:
2019 return TYPE_UNSIGNED (type) ?
2020 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH] :
2021 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
2022
2023 case V16QImode:
2024 return TYPE_UNSIGNED (type) ?
2025 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB] :
2026 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
2027 default:
2028 return NULL_TREE;
2029 }
2030}
2031
5b900a4c
DN
2032
2033/* Return true iff the data reference of TYPE can reach vector alignment (16)
2034 after applying N iterations. This routine does not determine
2035 how many iterations are required to reach the desired alignment. */
2036
2037static bool
3101faab 2038rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
2039{
2040 if (is_packed)
2041 return false;
2042
2043 if (TARGET_32BIT)
2044 {
2045 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
2046 return true;
2047
2048 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
2049 return true;
2050
2051 return false;
2052 }
2053 else
2054 {
2055 if (TARGET_MACHO)
2056 return false;
2057
2058 /* Assume all other types are naturally aligned. CHECKME! */
2059 return true;
2060 }
2061}
2062
5da702b1
AH
2063/* Handle generic options of the form -mfoo=yes/no.
2064 NAME is the option name.
2065 VALUE is the option value.
2066 FLAG points to the flag in which to store 1 or 0, depending on
2067 whether the option value is 'yes' or 'no' respectively. */
993f19a8 2068static void
5da702b1 2069rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 2070{
5da702b1 2071 if (value == 0)
993f19a8 2072 return;
5da702b1
AH
2073 else if (!strcmp (value, "yes"))
2074 *flag = 1;
2075 else if (!strcmp (value, "no"))
2076 *flag = 0;
08b57fb3 2077 else
5da702b1 2078 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
2079}
2080
c4501e62
JJ
2081/* Validate and record the size specified with the -mtls-size option. */
2082
2083static void
863d938c 2084rs6000_parse_tls_size_option (void)
c4501e62
JJ
2085{
2086 if (rs6000_tls_size_string == 0)
2087 return;
2088 else if (strcmp (rs6000_tls_size_string, "16") == 0)
2089 rs6000_tls_size = 16;
2090 else if (strcmp (rs6000_tls_size_string, "32") == 0)
2091 rs6000_tls_size = 32;
2092 else if (strcmp (rs6000_tls_size_string, "64") == 0)
2093 rs6000_tls_size = 64;
2094 else
9e637a26 2095 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
2096}
2097
5accd822 2098void
a2369ed3 2099optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 2100{
2e3f0db6
DJ
2101 if (DEFAULT_ABI == ABI_DARWIN)
2102 /* The Darwin libraries never set errno, so we might as well
2103 avoid calling them when that's the only reason we would. */
2104 flag_errno_math = 0;
59d6560b
DE
2105
2106 /* Double growth factor to counter reduced min jump length. */
2107 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
2108
2109 /* Enable section anchors by default.
2110 Skip section anchors for Objective C and Objective C++
2111 until the front ends are fixed. */
23f99493 2112 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
194c524a 2113 flag_section_anchors = 1;
5accd822 2114}
78f5898b
AH
2115
2116/* Implement TARGET_HANDLE_OPTION. */
2117
2118static bool
2119rs6000_handle_option (size_t code, const char *arg, int value)
2120{
2121 switch (code)
2122 {
2123 case OPT_mno_power:
2124 target_flags &= ~(MASK_POWER | MASK_POWER2
2125 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2126 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2127 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2128 break;
2129 case OPT_mno_powerpc:
2130 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2131 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2132 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2133 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2134 break;
2135 case OPT_mfull_toc:
d2894ab5
DE
2136 target_flags &= ~MASK_MINIMAL_TOC;
2137 TARGET_NO_FP_IN_TOC = 0;
2138 TARGET_NO_SUM_IN_TOC = 0;
2139 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2140#ifdef TARGET_USES_SYSV4_OPT
2141 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
2142 just the same as -mminimal-toc. */
2143 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2144 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2145#endif
2146 break;
2147
2148#ifdef TARGET_USES_SYSV4_OPT
2149 case OPT_mtoc:
2150 /* Make -mtoc behave like -mminimal-toc. */
2151 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2152 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2153 break;
2154#endif
2155
2156#ifdef TARGET_USES_AIX64_OPT
2157 case OPT_maix64:
2158#else
2159 case OPT_m64:
2160#endif
2c9c9afd
AM
2161 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2162 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2163 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2164 break;
2165
2166#ifdef TARGET_USES_AIX64_OPT
2167 case OPT_maix32:
2168#else
2169 case OPT_m32:
2170#endif
2171 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2172 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2173 break;
2174
2175 case OPT_minsert_sched_nops_:
2176 rs6000_sched_insert_nops_str = arg;
2177 break;
2178
2179 case OPT_mminimal_toc:
2180 if (value == 1)
2181 {
d2894ab5
DE
2182 TARGET_NO_FP_IN_TOC = 0;
2183 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2184 }
2185 break;
2186
2187 case OPT_mpower:
2188 if (value == 1)
c2dba4ab
AH
2189 {
2190 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2191 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2192 }
78f5898b
AH
2193 break;
2194
2195 case OPT_mpower2:
2196 if (value == 1)
c2dba4ab
AH
2197 {
2198 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2199 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2200 }
78f5898b
AH
2201 break;
2202
2203 case OPT_mpowerpc_gpopt:
2204 case OPT_mpowerpc_gfxopt:
2205 if (value == 1)
c2dba4ab
AH
2206 {
2207 target_flags |= MASK_POWERPC;
2208 target_flags_explicit |= MASK_POWERPC;
2209 }
78f5898b
AH
2210 break;
2211
df01da37
DE
2212 case OPT_maix_struct_return:
2213 case OPT_msvr4_struct_return:
2214 rs6000_explicit_options.aix_struct_ret = true;
2215 break;
2216
78f5898b 2217 case OPT_mvrsave_:
a2db2771 2218 rs6000_explicit_options.vrsave = true;
78f5898b
AH
2219 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2220 break;
78f5898b 2221
94f4765c
NF
2222 case OPT_misel:
2223 rs6000_explicit_options.isel = true;
2224 rs6000_isel = value;
2225 break;
2226
78f5898b
AH
2227 case OPT_misel_:
2228 rs6000_explicit_options.isel = true;
2229 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2230 break;
2231
94f4765c
NF
2232 case OPT_mspe:
2233 rs6000_explicit_options.spe = true;
2234 rs6000_spe = value;
2235 break;
2236
78f5898b
AH
2237 case OPT_mspe_:
2238 rs6000_explicit_options.spe = true;
2239 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2240 break;
2241
2242 case OPT_mdebug_:
2243 rs6000_debug_name = arg;
2244 break;
2245
2246#ifdef TARGET_USES_SYSV4_OPT
2247 case OPT_mcall_:
2248 rs6000_abi_name = arg;
2249 break;
2250
2251 case OPT_msdata_:
2252 rs6000_sdata_name = arg;
2253 break;
2254
2255 case OPT_mtls_size_:
2256 rs6000_tls_size_string = arg;
2257 break;
2258
2259 case OPT_mrelocatable:
2260 if (value == 1)
c2dba4ab 2261 {
e0bf274f
AM
2262 target_flags |= MASK_MINIMAL_TOC;
2263 target_flags_explicit |= MASK_MINIMAL_TOC;
2264 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2265 }
78f5898b
AH
2266 break;
2267
2268 case OPT_mrelocatable_lib:
2269 if (value == 1)
c2dba4ab 2270 {
e0bf274f
AM
2271 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2272 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2273 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2274 }
78f5898b 2275 else
c2dba4ab
AH
2276 {
2277 target_flags &= ~MASK_RELOCATABLE;
2278 target_flags_explicit |= MASK_RELOCATABLE;
2279 }
78f5898b
AH
2280 break;
2281#endif
2282
2283 case OPT_mabi_:
78f5898b
AH
2284 if (!strcmp (arg, "altivec"))
2285 {
a2db2771 2286 rs6000_explicit_options.altivec_abi = true;
78f5898b 2287 rs6000_altivec_abi = 1;
a2db2771
JJ
2288
2289 /* Enabling the AltiVec ABI turns off the SPE ABI. */
78f5898b
AH
2290 rs6000_spe_abi = 0;
2291 }
2292 else if (! strcmp (arg, "no-altivec"))
d3603e8c 2293 {
a2db2771 2294 rs6000_explicit_options.altivec_abi = true;
d3603e8c
AM
2295 rs6000_altivec_abi = 0;
2296 }
78f5898b
AH
2297 else if (! strcmp (arg, "spe"))
2298 {
a2db2771 2299 rs6000_explicit_options.spe_abi = true;
78f5898b
AH
2300 rs6000_spe_abi = 1;
2301 rs6000_altivec_abi = 0;
2302 if (!TARGET_SPE_ABI)
2303 error ("not configured for ABI: '%s'", arg);
2304 }
2305 else if (! strcmp (arg, "no-spe"))
d3603e8c 2306 {
a2db2771 2307 rs6000_explicit_options.spe_abi = true;
d3603e8c
AM
2308 rs6000_spe_abi = 0;
2309 }
78f5898b
AH
2310
2311 /* These are here for testing during development only; please do not
2312 document them in the manual. */
2313 else if (! strcmp (arg, "d64"))
2314 {
2315 rs6000_darwin64_abi = 1;
2316 warning (0, "Using darwin64 ABI");
2317 }
2318 else if (! strcmp (arg, "d32"))
2319 {
2320 rs6000_darwin64_abi = 0;
2321 warning (0, "Using old darwin ABI");
2322 }
2323
602ea4d3
JJ
2324 else if (! strcmp (arg, "ibmlongdouble"))
2325 {
d3603e8c 2326 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2327 rs6000_ieeequad = 0;
2328 warning (0, "Using IBM extended precision long double");
2329 }
2330 else if (! strcmp (arg, "ieeelongdouble"))
2331 {
d3603e8c 2332 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2333 rs6000_ieeequad = 1;
2334 warning (0, "Using IEEE extended precision long double");
2335 }
2336
78f5898b
AH
2337 else
2338 {
2339 error ("unknown ABI specified: '%s'", arg);
2340 return false;
2341 }
2342 break;
2343
2344 case OPT_mcpu_:
2345 rs6000_select[1].string = arg;
2346 break;
2347
2348 case OPT_mtune_:
2349 rs6000_select[2].string = arg;
2350 break;
2351
2352 case OPT_mtraceback_:
2353 rs6000_traceback_name = arg;
2354 break;
2355
2356 case OPT_mfloat_gprs_:
2357 rs6000_explicit_options.float_gprs = true;
2358 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2359 rs6000_float_gprs = 1;
2360 else if (! strcmp (arg, "double"))
2361 rs6000_float_gprs = 2;
2362 else if (! strcmp (arg, "no"))
2363 rs6000_float_gprs = 0;
2364 else
2365 {
2366 error ("invalid option for -mfloat-gprs: '%s'", arg);
2367 return false;
2368 }
2369 break;
2370
2371 case OPT_mlong_double_:
2372 rs6000_explicit_options.long_double = true;
2373 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2374 if (value != 64 && value != 128)
2375 {
2376 error ("Unknown switch -mlong-double-%s", arg);
2377 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2378 return false;
2379 }
2380 else
2381 rs6000_long_double_type_size = value;
2382 break;
2383
2384 case OPT_msched_costly_dep_:
2385 rs6000_sched_costly_dep_str = arg;
2386 break;
2387
2388 case OPT_malign_:
2389 rs6000_explicit_options.alignment = true;
2390 if (! strcmp (arg, "power"))
2391 {
2392 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2393 some C library functions, so warn about it. The flag may be
2394 useful for performance studies from time to time though, so
2395 don't disable it entirely. */
2396 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2397 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2398 " it is incompatible with the installed C and C++ libraries");
2399 rs6000_alignment_flags = MASK_ALIGN_POWER;
2400 }
2401 else if (! strcmp (arg, "natural"))
2402 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2403 else
2404 {
2405 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2406 return false;
2407 }
2408 break;
2409 }
2410 return true;
2411}
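/* Illustrative trace of how this handler and rs6000_override_options
   cooperate (nothing beyond the code above): for "-mcpu=G5" this function
   merely records the string in rs6000_select[1]; rs6000_override_options
   later looks "G5" up in processor_target_table, sets rs6000_cpu to
   PROCESSOR_POWER4 and merges that entry's target_enable bits into
   target_flags, subject to target_flags_explicit.  */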
3cfa4909
MM
2412\f
2413/* Do anything needed at the start of the asm file. */
2414
1bc7c5b6 2415static void
863d938c 2416rs6000_file_start (void)
3cfa4909 2417{
c4d38ccb 2418 size_t i;
3cfa4909 2419 char buffer[80];
d330fd93 2420 const char *start = buffer;
3cfa4909 2421 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2422 const char *default_cpu = TARGET_CPU_DEFAULT;
2423 FILE *file = asm_out_file;
2424
2425 default_file_start ();
2426
2427#ifdef TARGET_BI_ARCH
2428 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2429 default_cpu = 0;
2430#endif
3cfa4909
MM
2431
2432 if (flag_verbose_asm)
2433 {
2434 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2435 rs6000_select[0].string = default_cpu;
2436
b6a1cbae 2437 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2438 {
2439 ptr = &rs6000_select[i];
2440 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2441 {
2442 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2443 start = "";
2444 }
2445 }
2446
9c6b4ed9 2447 if (PPC405_ERRATUM77)
b0bfee6e 2448 {
9c6b4ed9 2449 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2450 start = "";
2451 }
b0bfee6e 2452
b91da81f 2453#ifdef USING_ELFOS_H
3cfa4909
MM
2454 switch (rs6000_sdata)
2455 {
2456 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2457 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2458 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2459 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2460 }
2461
2462 if (rs6000_sdata && g_switch_value)
2463 {
307b599c
MK
2464 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2465 g_switch_value);
3cfa4909
MM
2466 start = "";
2467 }
2468#endif
2469
2470 if (*start == '\0')
949ea356 2471 putc ('\n', file);
3cfa4909 2472 }
b723e82f 2473
e51917ae
JM
2474#ifdef HAVE_AS_GNU_ATTRIBUTE
2475 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2476 {
2477 fprintf (file, "\t.gnu_attribute 4, %d\n",
2478 (TARGET_HARD_FLOAT && TARGET_FPRS) ? 1 : 2);
2479 fprintf (file, "\t.gnu_attribute 8, %d\n",
2480 (TARGET_ALTIVEC_ABI ? 2
2481 : TARGET_SPE_ABI ? 3
2482 : 1));
2483 }
e51917ae
JM
2484#endif
2485
b723e82f
JJ
2486 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2487 {
d6b5193b
RS
2488 switch_to_section (toc_section);
2489 switch_to_section (text_section);
b723e82f 2490 }
3cfa4909 2491}
c4e18b1c 2492
5248c961 2493\f
a0ab749a 2494/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2495
2496int
863d938c 2497direct_return (void)
9878760c 2498{
4697a36c
MM
2499 if (reload_completed)
2500 {
2501 rs6000_stack_t *info = rs6000_stack_info ();
2502
2503 if (info->first_gp_reg_save == 32
2504 && info->first_fp_reg_save == 64
00b960c7 2505 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2506 && ! info->lr_save_p
2507 && ! info->cr_save_p
00b960c7 2508 && info->vrsave_mask == 0
c81fc13e 2509 && ! info->push_p)
4697a36c
MM
2510 return 1;
2511 }
2512
2513 return 0;
9878760c
RK
2514}
2515
4e74d8ec
MM
2516/* Return the number of instructions it takes to form a constant in an
2517 integer register. */
2518
48d72335 2519int
a2369ed3 2520num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2521{
2522 /* signed constant loadable with {cal|addi} */
547b216d 2523 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2524 return 1;
2525
4e74d8ec 2526 /* constant loadable with {cau|addis} */
547b216d
DE
2527 else if ((value & 0xffff) == 0
2528 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2529 return 1;
2530
5f59ecb7 2531#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2532 else if (TARGET_POWERPC64)
4e74d8ec 2533 {
a65c591c
DE
2534 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2535 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2536
a65c591c 2537 if (high == 0 || high == -1)
4e74d8ec
MM
2538 return 2;
2539
a65c591c 2540 high >>= 1;
4e74d8ec 2541
a65c591c 2542 if (low == 0)
4e74d8ec 2543 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2544 else
2545 return (num_insns_constant_wide (high)
e396202a 2546 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2547 }
2548#endif
2549
2550 else
2551 return 2;
2552}
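/* A few worked examples of the above (illustrative only):
     num_insns_constant_wide (5)          == 1   a single addi (li)
     num_insns_constant_wide (0x12340000) == 1   a single addis (lis)
     num_insns_constant_wide (0x12345678) == 2   lis + ori
   and with a 64-bit HOST_WIDE_INT on a PowerPC64 target,
     num_insns_constant_wide (0x1234567800000000) == 3
   via the recursive case: the 0x12345678 high part (2 insns) plus one
   shift to move it into place.  */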
2553
2554int
a2369ed3 2555num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2556{
37409796 2557 HOST_WIDE_INT low, high;
bb8df8a6 2558
37409796 2559 switch (GET_CODE (op))
0d30d435 2560 {
37409796 2561 case CONST_INT:
0d30d435 2562#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2563 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2564 && mask64_operand (op, mode))
c4ad648e 2565 return 2;
0d30d435
DE
2566 else
2567#endif
2568 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2569
37409796 2570 case CONST_DOUBLE:
e41b2a33 2571 if (mode == SFmode || mode == SDmode)
37409796
NS
2572 {
2573 long l;
2574 REAL_VALUE_TYPE rv;
bb8df8a6 2575
37409796 2576 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
e41b2a33
PB
2577 if (DECIMAL_FLOAT_MODE_P (mode))
2578 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
2579 else
2580 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
37409796
NS
2581 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2582 }
a260abc9 2583
37409796
NS
2584 if (mode == VOIDmode || mode == DImode)
2585 {
2586 high = CONST_DOUBLE_HIGH (op);
2587 low = CONST_DOUBLE_LOW (op);
2588 }
2589 else
2590 {
2591 long l[2];
2592 REAL_VALUE_TYPE rv;
bb8df8a6 2593
37409796 2594 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2595 if (DECIMAL_FLOAT_MODE_P (mode))
2596 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2597 else
2598 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2599 high = l[WORDS_BIG_ENDIAN == 0];
2600 low = l[WORDS_BIG_ENDIAN != 0];
2601 }
47ad8c61 2602
37409796
NS
2603 if (TARGET_32BIT)
2604 return (num_insns_constant_wide (low)
2605 + num_insns_constant_wide (high));
2606 else
2607 {
2608 if ((high == 0 && low >= 0)
2609 || (high == -1 && low < 0))
2610 return num_insns_constant_wide (low);
bb8df8a6 2611
1990cd79 2612 else if (mask64_operand (op, mode))
37409796 2613 return 2;
bb8df8a6 2614
37409796
NS
2615 else if (low == 0)
2616 return num_insns_constant_wide (high) + 1;
bb8df8a6 2617
37409796
NS
2618 else
2619 return (num_insns_constant_wide (high)
2620 + num_insns_constant_wide (low) + 1);
2621 }
bb8df8a6 2622
37409796
NS
2623 default:
2624 gcc_unreachable ();
4e74d8ec 2625 }
4e74d8ec
MM
2626}
2627
0972012c
RS
2628/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2629 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2630 corresponding element of the vector, but for V4SFmode and V2SFmode,
2631 the corresponding "float" is interpreted as an SImode integer. */
2632
2633static HOST_WIDE_INT
2634const_vector_elt_as_int (rtx op, unsigned int elt)
2635{
2636 rtx tmp = CONST_VECTOR_ELT (op, elt);
2637 if (GET_MODE (op) == V4SFmode
2638 || GET_MODE (op) == V2SFmode)
2639 tmp = gen_lowpart (SImode, tmp);
2640 return INTVAL (tmp);
2641}
452a7d36 2642
77ccdfed 2643/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2644 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2645 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2646 all items are set to the same value and contain COPIES replicas of the
2647 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2648 operand and the others are set to the value of the operand's msb. */
2649
2650static bool
2651vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2652{
66180ff3
PB
2653 enum machine_mode mode = GET_MODE (op);
2654 enum machine_mode inner = GET_MODE_INNER (mode);
2655
2656 unsigned i;
2657 unsigned nunits = GET_MODE_NUNITS (mode);
2658 unsigned bitsize = GET_MODE_BITSIZE (inner);
2659 unsigned mask = GET_MODE_MASK (inner);
2660
0972012c 2661 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2662 HOST_WIDE_INT splat_val = val;
2663 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2664
2665 /* Construct the value to be splatted, if possible. If not, return 0. */
2666 for (i = 2; i <= copies; i *= 2)
452a7d36 2667 {
66180ff3
PB
2668 HOST_WIDE_INT small_val;
2669 bitsize /= 2;
2670 small_val = splat_val >> bitsize;
2671 mask >>= bitsize;
2672 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2673 return false;
2674 splat_val = small_val;
2675 }
c4ad648e 2676
66180ff3
PB
2677 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2678 if (EASY_VECTOR_15 (splat_val))
2679 ;
2680
2681 /* Also check if we can splat, and then add the result to itself. Do so if
2682 the value is positive, or if the splat instruction is using OP's mode;
2683 for splat_val < 0, the splat and the add should use the same mode. */
2684 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2685 && (splat_val >= 0 || (step == 1 && copies == 1)))
2686 ;
2687
2688 else
2689 return false;
2690
2691 /* Check if VAL is present in every STEP-th element, and the
2692 other elements are filled with its most significant bit. */
2693 for (i = 0; i < nunits - 1; ++i)
2694 {
2695 HOST_WIDE_INT desired_val;
2696 if (((i + 1) & (step - 1)) == 0)
2697 desired_val = val;
2698 else
2699 desired_val = msb_val;
2700
0972012c 2701 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2702 return false;
452a7d36 2703 }
66180ff3
PB
2704
2705 return true;
452a7d36
HP
2706}
2707
69ef87e2 2708
77ccdfed 2709/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2710 with a vspltisb, vspltish or vspltisw. */
2711
2712bool
2713easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2714{
66180ff3 2715 unsigned step, copies;
d744e06e 2716
66180ff3
PB
2717 if (mode == VOIDmode)
2718 mode = GET_MODE (op);
2719 else if (mode != GET_MODE (op))
2720 return false;
d744e06e 2721
66180ff3
PB
2722 /* Start with a vspltisw. */
2723 step = GET_MODE_NUNITS (mode) / 4;
2724 copies = 1;
2725
2726 if (vspltis_constant (op, step, copies))
2727 return true;
2728
2729 /* Then try with a vspltish. */
2730 if (step == 1)
2731 copies <<= 1;
2732 else
2733 step >>= 1;
2734
2735 if (vspltis_constant (op, step, copies))
2736 return true;
2737
2738 /* And finally a vspltisb. */
2739 if (step == 1)
2740 copies <<= 1;
2741 else
2742 step >>= 1;
2743
2744 if (vspltis_constant (op, step, copies))
2745 return true;
2746
2747 return false;
d744e06e
AH
2748}
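/* Worked example of the step/copies walk above (illustrative only): for a
   V8HImode constant whose halfwords are all 5, the first try (step 2,
   copies 1) asks whether the vector could be the result of "vspltisw 5",
   i.e. halfwords {0,5,0,5,...}; it cannot, so step is halved and the
   second try (step 1, copies 1) matches "vspltish 5".  A V16QImode
   constant of all 0xff bytes, read as the signed value -1, already matches
   the first (vspltisw) form, since every element equals the sign fill of
   the splatted value.  */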
2749
66180ff3
PB
2750/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2751 result is OP. Abort if it is not possible. */
d744e06e 2752
f676971a 2753rtx
66180ff3 2754gen_easy_altivec_constant (rtx op)
452a7d36 2755{
66180ff3
PB
2756 enum machine_mode mode = GET_MODE (op);
2757 int nunits = GET_MODE_NUNITS (mode);
2758 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2759 unsigned step = nunits / 4;
2760 unsigned copies = 1;
2761
2762 /* Start with a vspltisw. */
2763 if (vspltis_constant (op, step, copies))
2764 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2765
2766 /* Then try with a vspltish. */
2767 if (step == 1)
2768 copies <<= 1;
2769 else
2770 step >>= 1;
2771
2772 if (vspltis_constant (op, step, copies))
2773 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2774
2775 /* And finally a vspltisb. */
2776 if (step == 1)
2777 copies <<= 1;
2778 else
2779 step >>= 1;
2780
2781 if (vspltis_constant (op, step, copies))
2782 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2783
2784 gcc_unreachable ();
d744e06e
AH
2785}
2786
2787const char *
a2369ed3 2788output_vec_const_move (rtx *operands)
d744e06e
AH
2789{
2790 int cst, cst2;
2791 enum machine_mode mode;
2792 rtx dest, vec;
2793
2794 dest = operands[0];
2795 vec = operands[1];
d744e06e 2796 mode = GET_MODE (dest);
69ef87e2 2797
d744e06e
AH
2798 if (TARGET_ALTIVEC)
2799 {
66180ff3 2800 rtx splat_vec;
d744e06e
AH
2801 if (zero_constant (vec, mode))
2802 return "vxor %0,%0,%0";
37409796 2803
66180ff3
PB
2804 splat_vec = gen_easy_altivec_constant (vec);
2805 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2806 operands[1] = XEXP (splat_vec, 0);
2807 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2808 return "#";
bb8df8a6 2809
66180ff3 2810 switch (GET_MODE (splat_vec))
98ef3137 2811 {
37409796 2812 case V4SImode:
66180ff3 2813 return "vspltisw %0,%1";
c4ad648e 2814
37409796 2815 case V8HImode:
66180ff3 2816 return "vspltish %0,%1";
c4ad648e 2817
37409796 2818 case V16QImode:
66180ff3 2819 return "vspltisb %0,%1";
bb8df8a6 2820
37409796
NS
2821 default:
2822 gcc_unreachable ();
98ef3137 2823 }
69ef87e2
AH
2824 }
2825
37409796 2826 gcc_assert (TARGET_SPE);
bb8df8a6 2827
37409796
NS
2828 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2829 pattern of V1DI, V4HI, and V2SF.
2830
2831 FIXME: We should probably return # and add post reload
2832 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
2833 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2834 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2835 operands[1] = CONST_VECTOR_ELT (vec, 0);
2836 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2837 if (cst == cst2)
2838 return "li %0,%1\n\tevmergelo %0,%0,%0";
2839 else
2840 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2841}
2842
f5027409
RE
2843/* Initialize the paired-float vector TARGET to VALS. */
2844
2845void
2846paired_expand_vector_init (rtx target, rtx vals)
2847{
2848 enum machine_mode mode = GET_MODE (target);
2849 int n_elts = GET_MODE_NUNITS (mode);
2850 int n_var = 0;
2851 rtx x, new, tmp, constant_op, op1, op2;
2852 int i;
2853
2854 for (i = 0; i < n_elts; ++i)
2855 {
2856 x = XVECEXP (vals, 0, i);
2857 if (!CONSTANT_P (x))
2858 ++n_var;
2859 }
2860 if (n_var == 0)
2861 {
2862 /* Load from constant pool. */
2863 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2864 return;
2865 }
2866
2867 if (n_var == 2)
2868 {
2869 /* The vector is initialized only with non-constants. */
2870 new = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
2871 XVECEXP (vals, 0, 1));
2872
2873 emit_move_insn (target, new);
2874 return;
2875 }
2876
2877 /* One field is non-constant and the other one is a constant. Load the
2878 constant from the constant pool and use ps_merge instruction to
2879 construct the whole vector. */
2880 op1 = XVECEXP (vals, 0, 0);
2881 op2 = XVECEXP (vals, 0, 1);
2882
2883 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
2884
2885 tmp = gen_reg_rtx (GET_MODE (constant_op));
2886 emit_move_insn (tmp, constant_op);
2887
2888 if (CONSTANT_P (op1))
2889 new = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
2890 else
2891 new = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
2892
2893 emit_move_insn (target, new);
2894}
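/* A short illustrative trace of the mixed case above (operand names are
   hypothetical): for vals = { x, 2.0f } with x in a register, op1 is x and
   op2 is the constant, so 2.0f is loaded from the constant pool into tmp
   and the result is built as VEC_CONCAT (x, tmp), which the ps_merge
   pattern then implements.  */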
2895
e2e95f45
RE
2896void
2897paired_expand_vector_move (rtx operands[])
2898{
2899 rtx op0 = operands[0], op1 = operands[1];
2900
2901 emit_move_insn (op0, op1);
2902}
2903
2904/* Emit a vector compare for code RCODE. DEST is the destination, OP1 and
2905 OP2 are the two VEC_COND_EXPR operands, and CC_OP0 and CC_OP1 are the
2906 two operands of the comparison RCODE. This is a recursive
2907 function. */
2908
2909static void
2910paired_emit_vector_compare (enum rtx_code rcode,
2911 rtx dest, rtx op0, rtx op1,
2912 rtx cc_op0, rtx cc_op1)
2913{
2914 rtx tmp = gen_reg_rtx (V2SFmode);
2915 rtx tmp1, max, min, equal_zero;
2916
2917 gcc_assert (TARGET_PAIRED_FLOAT);
2918 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
2919
2920 switch (rcode)
2921 {
2922 case LT:
2923 case LTU:
2924 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2925 return;
2926 case GE:
2927 case GEU:
2928 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2929 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
2930 return;
2931 case LE:
2932 case LEU:
2933 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
2934 return;
2935 case GT:
2936 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2937 return;
2938 case EQ:
2939 tmp1 = gen_reg_rtx (V2SFmode);
2940 max = gen_reg_rtx (V2SFmode);
2941 min = gen_reg_rtx (V2SFmode);
2942 equal_zero = gen_reg_rtx (V2SFmode);
2943
2944 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2945 emit_insn (gen_selv2sf4
2946 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2947 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
2948 emit_insn (gen_selv2sf4
2949 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2950 emit_insn (gen_subv2sf3 (tmp1, min, max));
2951 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
2952 return;
2953 case NE:
2954 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
2955 return;
2956 case UNLE:
2957 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2958 return;
2959 case UNLT:
2960 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
2961 return;
2962 case UNGE:
2963 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2964 return;
2965 case UNGT:
2966 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
2967 return;
2968 default:
2969 gcc_unreachable ();
2970 }
2971
2972 return;
2973}
2974
2975/* Emit vector conditional expression.
2976 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
2977 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
2978
2979int
2980paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
2981 rtx cond, rtx cc_op0, rtx cc_op1)
2982{
2983 enum rtx_code rcode = GET_CODE (cond);
2984
2985 if (!TARGET_PAIRED_FLOAT)
2986 return 0;
2987
2988 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
2989
2990 return 1;
2991}
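/* Illustrative trace of the recursion above (no new logic): lowering
   "dest = (a > b) ? op1 : op2" enters the GT case, which recurses as LE
   with the value operands swapped, which in turn recurses as GE with the
   compare operands swapped.  What is finally emitted is tmp = b - a
   followed by the sel pattern on (tmp, op2, op1): assuming, as the GE case
   implies, that sel picks its first value operand when the condition is
   non-negative, the result is op2 when a <= b and op1 otherwise -- exactly
   the GT semantics.  */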
2992
7a4eca66
DE
2993/* Initialize vector TARGET to VALS. */
2994
2995void
2996rs6000_expand_vector_init (rtx target, rtx vals)
2997{
2998 enum machine_mode mode = GET_MODE (target);
2999 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3000 int n_elts = GET_MODE_NUNITS (mode);
3001 int n_var = 0, one_var = -1;
3002 bool all_same = true, all_const_zero = true;
3003 rtx x, mem;
3004 int i;
3005
3006 for (i = 0; i < n_elts; ++i)
3007 {
3008 x = XVECEXP (vals, 0, i);
3009 if (!CONSTANT_P (x))
3010 ++n_var, one_var = i;
3011 else if (x != CONST0_RTX (inner_mode))
3012 all_const_zero = false;
3013
3014 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
3015 all_same = false;
3016 }
3017
3018 if (n_var == 0)
3019 {
501fb355 3020 rtx const_vec = gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0));
7a4eca66
DE
3021 if (mode != V4SFmode && all_const_zero)
3022 {
3023 /* Zero register. */
3024 emit_insn (gen_rtx_SET (VOIDmode, target,
3025 gen_rtx_XOR (mode, target, target)));
3026 return;
3027 }
501fb355 3028 else if (mode != V4SFmode && easy_vector_constant (const_vec, mode))
7a4eca66
DE
3029 {
3030 /* Splat immediate. */
501fb355 3031 emit_insn (gen_rtx_SET (VOIDmode, target, const_vec));
7a4eca66
DE
3032 return;
3033 }
3034 else if (all_same)
3035 ; /* Splat vector element. */
3036 else
3037 {
3038 /* Load from constant pool. */
501fb355 3039 emit_move_insn (target, const_vec);
7a4eca66
DE
3040 return;
3041 }
3042 }
3043
3044 /* Store value to stack temp. Load vector element. Splat. */
3045 if (all_same)
3046 {
3047 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3048 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
3049 XVECEXP (vals, 0, 0));
3050 x = gen_rtx_UNSPEC (VOIDmode,
3051 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3052 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3053 gen_rtvec (2,
3054 gen_rtx_SET (VOIDmode,
3055 target, mem),
3056 x)));
3057 x = gen_rtx_VEC_SELECT (inner_mode, target,
3058 gen_rtx_PARALLEL (VOIDmode,
3059 gen_rtvec (1, const0_rtx)));
3060 emit_insn (gen_rtx_SET (VOIDmode, target,
3061 gen_rtx_VEC_DUPLICATE (mode, x)));
3062 return;
3063 }
3064
3065 /* One field is non-constant. Load constant then overwrite
3066 varying field. */
3067 if (n_var == 1)
3068 {
3069 rtx copy = copy_rtx (vals);
3070
57b51d4d 3071 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
3072 varying element. */
3073 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
3074 rs6000_expand_vector_init (target, copy);
3075
3076 /* Insert variable. */
3077 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
3078 return;
3079 }
3080
3081 /* Construct the vector in memory one field at a time
3082 and load the whole vector. */
3083 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3084 for (i = 0; i < n_elts; i++)
3085 emit_move_insn (adjust_address_nv (mem, inner_mode,
3086 i * GET_MODE_SIZE (inner_mode)),
3087 XVECEXP (vals, 0, i));
3088 emit_move_insn (target, mem);
3089}
3090
3091/* Set field ELT of TARGET to VAL. */
3092
3093void
3094rs6000_expand_vector_set (rtx target, rtx val, int elt)
3095{
3096 enum machine_mode mode = GET_MODE (target);
3097 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3098 rtx reg = gen_reg_rtx (mode);
3099 rtx mask, mem, x;
3100 int width = GET_MODE_SIZE (inner_mode);
3101 int i;
3102
3103 /* Load single variable value. */
3104 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3105 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
3106 x = gen_rtx_UNSPEC (VOIDmode,
3107 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3108 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3109 gen_rtvec (2,
3110 gen_rtx_SET (VOIDmode,
3111 reg, mem),
3112 x)));
3113
3114 /* Linear sequence. */
3115 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
3116 for (i = 0; i < 16; ++i)
3117 XVECEXP (mask, 0, i) = GEN_INT (i);
3118
3119 /* Set permute mask to insert element into target. */
3120 for (i = 0; i < width; ++i)
3121 XVECEXP (mask, 0, elt*width + i)
3122 = GEN_INT (i + 0x10);
3123 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3124 x = gen_rtx_UNSPEC (mode,
3125 gen_rtvec (3, target, reg,
3126 force_reg (V16QImode, x)),
3127 UNSPEC_VPERM);
3128 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3129}
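
[Illustration, not part of the blamed file: a minimal host-side sketch of the permute control vector built by the two loops above. It assumes the usual big-endian vperm convention, where byte indices 0x00-0x0f select from the first source operand (the old TARGET) and 0x10-0x1f from the second (REG, which holds the new element); print_vperm_mask is a hypothetical helper, not part of GCC.]

#include <stdio.h>

/* Mirror the mask-building loops in rs6000_expand_vector_set:
   identity selection from the old vector, except that element ELT
   takes its bytes from the start of the other operand.  */
static void
print_vperm_mask (int elt, int width)
{
  int mask[16], i;

  for (i = 0; i < 16; ++i)
    mask[i] = i;                       /* keep byte i of the old vector */
  for (i = 0; i < width; ++i)
    mask[elt * width + i] = i + 0x10;  /* take byte i of the new value */

  for (i = 0; i < 16; ++i)
    printf ("%02x ", mask[i]);
  printf ("\n");
}

int
main (void)
{
  /* V4SI, element 2:
     00 01 02 03 04 05 06 07 10 11 12 13 0c 0d 0e 0f  */
  print_vperm_mask (2, 4);
  return 0;
}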
3130
3131/* Extract field ELT from VEC into TARGET. */
3132
3133void
3134rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3135{
3136 enum machine_mode mode = GET_MODE (vec);
3137 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3138 rtx mem, x;
3139
3140 /* Allocate mode-sized buffer. */
3141 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3142
3143 /* Add offset to field within buffer matching vector element. */
3144 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3145
3146 /* Store single field into mode-sized buffer. */
3147 x = gen_rtx_UNSPEC (VOIDmode,
3148 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3149 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3150 gen_rtvec (2,
3151 gen_rtx_SET (VOIDmode,
3152 mem, vec),
3153 x)));
3154 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3155}
3156
0ba1b2ff
AM
3157/* Generates shifts and masks for a pair of rldicl or rldicr insns to
3158 implement ANDing by the mask IN. */
3159void
a2369ed3 3160build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
3161{
3162#if HOST_BITS_PER_WIDE_INT >= 64
3163 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3164 int shift;
3165
37409796 3166 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
3167
3168 c = INTVAL (in);
3169 if (c & 1)
3170 {
3171 /* Assume c initially something like 0x00fff000000fffff. The idea
3172 is to rotate the word so that the middle ^^^^^^ group of zeros
3173 is at the MS end and can be cleared with an rldicl mask. We then
3174 rotate back and clear off the MS ^^ group of zeros with a
3175 second rldicl. */
3176 c = ~c; /* c == 0xff000ffffff00000 */
3177 lsb = c & -c; /* lsb == 0x0000000000100000 */
3178 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3179 c = ~c; /* c == 0x00fff000000fffff */
3180 c &= -lsb; /* c == 0x00fff00000000000 */
3181 lsb = c & -c; /* lsb == 0x0000100000000000 */
3182 c = ~c; /* c == 0xff000fffffffffff */
3183 c &= -lsb; /* c == 0xff00000000000000 */
3184 shift = 0;
3185 while ((lsb >>= 1) != 0)
3186 shift++; /* shift == 44 on exit from loop */
3187 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3188 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3189 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
3190 }
3191 else
0ba1b2ff
AM
3192 {
3193 /* Assume c initially something like 0xff000f0000000000. The idea
3194 is to rotate the word so that the ^^^ middle group of zeros
3195 is at the LS end and can be cleared with an rldicr mask. We then
3196 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3197 a second rldicr. */
3198 lsb = c & -c; /* lsb == 0x0000010000000000 */
3199 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3200 c = ~c; /* c == 0x00fff0ffffffffff */
3201 c &= -lsb; /* c == 0x00fff00000000000 */
3202 lsb = c & -c; /* lsb == 0x0000100000000000 */
3203 c = ~c; /* c == 0xff000fffffffffff */
3204 c &= -lsb; /* c == 0xff00000000000000 */
3205 shift = 0;
3206 while ((lsb >>= 1) != 0)
3207 shift++; /* shift == 44 on exit from loop */
3208 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3209 m1 >>= shift; /* m1 == 0x0000000000000fff */
3210 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3211 }
3212
3213 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3214 masks will be all 1's. We are guaranteed more than one transition. */
3215 out[0] = GEN_INT (64 - shift);
3216 out[1] = GEN_INT (m1);
3217 out[2] = GEN_INT (shift);
3218 out[3] = GEN_INT (m2);
3219#else
045572c7
GK
3220 (void)in;
3221 (void)out;
37409796 3222 gcc_unreachable ();
0ba1b2ff 3223#endif
a260abc9
DE
3224}
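
[Illustration, not part of the blamed file: a standalone check of the worked example in the comments above. Assuming rldicl acts as rotate-left-then-AND, rotating by 64-shift and masking with m1, then rotating back by shift and masking with m2, must reproduce the AND with the original constant 0x00fff000000fffff (for which the comments derive shift == 44). rotl64 is a hypothetical helper, not part of GCC.]

#include <assert.h>
#include <stdio.h>

static unsigned long long
rotl64 (unsigned long long x, int n)
{
  n &= 63;
  return n ? (x << n) | (x >> (64 - n)) : x;
}

int
main (void)
{
  unsigned long long in = 0x00fff000000fffffULL;  /* the AND mask      */
  int shift = 44;                                 /* out[2]            */
  unsigned long long m1 = 0x000000ffffffffffULL;  /* out[1]            */
  unsigned long long m2 = 0x00ffffffffffffffULL;  /* out[3]            */
  unsigned long long x = 0x123456789abcdef0ULL;   /* arbitrary operand */

  unsigned long long t = rotl64 (x, 64 - shift) & m1;  /* first rldicl  */
  unsigned long long r = rotl64 (t, shift) & m2;       /* second rldicl */

  assert (r == (x & in));
  printf ("ok\n");
  return 0;
}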
3225
54b695e7 3226/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
3227
3228bool
54b695e7
AH
3229invalid_e500_subreg (rtx op, enum machine_mode mode)
3230{
61c76239
JM
3231 if (TARGET_E500_DOUBLE)
3232 {
17caeff2 3233 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
4f011e1e
JM
3234 subreg:TI and reg:TF. Decimal float modes are like integer
3235 modes (only low part of each register used) for this
3236 purpose. */
61c76239 3237 if (GET_CODE (op) == SUBREG
4f011e1e
JM
3238 && (mode == SImode || mode == DImode || mode == TImode
3239 || mode == DDmode || mode == TDmode)
61c76239 3240 && REG_P (SUBREG_REG (op))
17caeff2 3241 && (GET_MODE (SUBREG_REG (op)) == DFmode
4f011e1e 3242 || GET_MODE (SUBREG_REG (op)) == TFmode))
61c76239
JM
3243 return true;
3244
17caeff2
JM
3245 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3246 reg:TI. */
61c76239 3247 if (GET_CODE (op) == SUBREG
4f011e1e 3248 && (mode == DFmode || mode == TFmode)
61c76239 3249 && REG_P (SUBREG_REG (op))
17caeff2 3250 && (GET_MODE (SUBREG_REG (op)) == DImode
4f011e1e
JM
3251 || GET_MODE (SUBREG_REG (op)) == TImode
3252 || GET_MODE (SUBREG_REG (op)) == DDmode
3253 || GET_MODE (SUBREG_REG (op)) == TDmode))
61c76239
JM
3254 return true;
3255 }
54b695e7 3256
61c76239
JM
3257 if (TARGET_SPE
3258 && GET_CODE (op) == SUBREG
3259 && mode == SImode
54b695e7 3260 && REG_P (SUBREG_REG (op))
14502dad 3261 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
3262 return true;
3263
3264 return false;
3265}
3266
58182de3 3267/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
3268 field is an FP double while the FP fields remain word aligned. */
3269
19d66194 3270unsigned int
fa5b0972
AM
3271rs6000_special_round_type_align (tree type, unsigned int computed,
3272 unsigned int specified)
95727fb8 3273{
fa5b0972 3274 unsigned int align = MAX (computed, specified);
95727fb8 3275 tree field = TYPE_FIELDS (type);
95727fb8 3276
bb8df8a6 3277 /* Skip all non-field decls.  */

85962ac8 3278 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
3279 field = TREE_CHAIN (field);
3280
fa5b0972
AM
3281 if (field != NULL && field != type)
3282 {
3283 type = TREE_TYPE (field);
3284 while (TREE_CODE (type) == ARRAY_TYPE)
3285 type = TREE_TYPE (type);
3286
3287 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3288 align = MAX (align, 64);
3289 }
95727fb8 3290
fa5b0972 3291 return align;
95727fb8
AP
3292}
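
[Illustration, not part of the blamed file: the rule above sketched with two hypothetical record layouts. Under the AIX power alignment rules, only a leading double raises the record alignment to a doubleword; a double in any later position stays word aligned within the record.]

struct leading_double  { double d; int i; };  /* record aligned to 8 bytes          */
struct trailing_double { int i; double d; };  /* d at offset 4, record aligned to 4 */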
3293
58182de3
GK
3294/* Darwin increases record alignment to the natural alignment of
3295 the first field. */
3296
3297unsigned int
3298darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3299 unsigned int specified)
3300{
3301 unsigned int align = MAX (computed, specified);
3302
3303 if (TYPE_PACKED (type))
3304 return align;
3305
3306 /* Find the first field, looking down into aggregates. */
3307 do {
3308 tree field = TYPE_FIELDS (type);
 3309 /* Skip all non-field decls.  */
3310 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3311 field = TREE_CHAIN (field);
3312 if (! field)
3313 break;
3314 type = TREE_TYPE (field);
3315 while (TREE_CODE (type) == ARRAY_TYPE)
3316 type = TREE_TYPE (type);
3317 } while (AGGREGATE_TYPE_P (type));
3318
3319 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3320 align = MAX (align, TYPE_ALIGN (type));
3321
3322 return align;
3323}
3324
a4f6c312 3325/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3326
3327int
f676971a 3328small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3329 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3330{
38c1f2d7 3331#if TARGET_ELF
5f59ecb7 3332 rtx sym_ref;
7509c759 3333
d9407988 3334 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3335 return 0;
a54d04b7 3336
f607bc57 3337 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3338 return 0;
3339
2aa42e6e
NF
3340 /* Vector and float memory instructions have a limited offset on the
3341 SPE, so using a vector or float variable directly as an operand is
3342 not useful. */
3343 if (TARGET_SPE
3344 && (SPE_VECTOR_MODE (mode) || FLOAT_MODE_P (mode)))
3345 return 0;
3346
88228c4b
MM
3347 if (GET_CODE (op) == SYMBOL_REF)
3348 sym_ref = op;
3349
3350 else if (GET_CODE (op) != CONST
3351 || GET_CODE (XEXP (op, 0)) != PLUS
3352 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3353 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3354 return 0;
3355
88228c4b 3356 else
dbf55e53
MM
3357 {
3358 rtx sum = XEXP (op, 0);
3359 HOST_WIDE_INT summand;
3360
3361 /* We have to be careful here, because it is the referenced address
c4ad648e 3362 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3363 summand = INTVAL (XEXP (sum, 1));
307b599c 3364 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3365 return 0;
dbf55e53
MM
3366
3367 sym_ref = XEXP (sum, 0);
3368 }
88228c4b 3369
20bfcd69 3370 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3371#else
3372 return 0;
3373#endif
7509c759 3374}
46c07df8 3375
3a1f863f 3376/* Return true if either operand is a general purpose register. */
46c07df8 3377
3a1f863f
DE
3378bool
3379gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3380{
3a1f863f
DE
3381 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3382 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3383}
3384
9ebbca7d 3385\f
4d588c14
RH
3386/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3387
f676971a
EC
3388static int
3389constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 3390{
9390387d 3391 switch (GET_CODE (op))
9ebbca7d
GK
3392 {
3393 case SYMBOL_REF:
c4501e62
JJ
3394 if (RS6000_SYMBOL_REF_TLS_P (op))
3395 return 0;
3396 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
3397 {
3398 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3399 {
3400 *have_sym = 1;
3401 return 1;
3402 }
3403 else
3404 return 0;
3405 }
3406 else if (! strcmp (XSTR (op, 0), toc_label_name))
3407 {
3408 *have_toc = 1;
3409 return 1;
3410 }
3411 else
3412 return 0;
9ebbca7d
GK
3413 case PLUS:
3414 case MINUS:
c1f11548
DE
3415 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3416 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 3417 case CONST:
a4f6c312 3418 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 3419 case CONST_INT:
a4f6c312 3420 return 1;
9ebbca7d 3421 default:
a4f6c312 3422 return 0;
9ebbca7d
GK
3423 }
3424}
3425
4d588c14 3426static bool
a2369ed3 3427constant_pool_expr_p (rtx op)
9ebbca7d
GK
3428{
3429 int have_sym = 0;
3430 int have_toc = 0;
3431 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3432}
3433
48d72335 3434bool
a2369ed3 3435toc_relative_expr_p (rtx op)
9ebbca7d 3436{
4d588c14
RH
3437 int have_sym = 0;
3438 int have_toc = 0;
3439 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3440}
3441
4d588c14 3442bool
a2369ed3 3443legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3444{
3445 return (TARGET_TOC
3446 && GET_CODE (x) == PLUS
3447 && GET_CODE (XEXP (x, 0)) == REG
3448 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3449 && constant_pool_expr_p (XEXP (x, 1)));
3450}
3451
d04b6e6e
EB
3452static bool
3453legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3454{
3455 return (DEFAULT_ABI == ABI_V4
3456 && !flag_pic && !TARGET_TOC
3457 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3458 && small_data_operand (x, mode));
3459}
3460
60cdabab
DE
3461/* SPE offset addressing is limited to 5-bits worth of double words. */
3462#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
3463
76d2b81d
DJ
3464bool
3465rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3466{
3467 unsigned HOST_WIDE_INT offset, extra;
3468
3469 if (GET_CODE (x) != PLUS)
3470 return false;
3471 if (GET_CODE (XEXP (x, 0)) != REG)
3472 return false;
3473 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3474 return false;
60cdabab
DE
3475 if (legitimate_constant_pool_address_p (x))
3476 return true;
4d588c14
RH
3477 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3478 return false;
3479
3480 offset = INTVAL (XEXP (x, 1));
3481 extra = 0;
3482 switch (mode)
3483 {
3484 case V16QImode:
3485 case V8HImode:
3486 case V4SFmode:
3487 case V4SImode:
7a4eca66 3488 /* AltiVec vector modes. Only reg+reg addressing is valid and
1a23970d
DE
3489 constant offset zero should not occur due to canonicalization. */
3490 return false;
4d588c14
RH
3491
3492 case V4HImode:
3493 case V2SImode:
3494 case V1DImode:
3495 case V2SFmode:
d42a3bae 3496 /* Paired vector modes. Only reg+reg addressing is valid and
1a23970d 3497 constant offset zero should not occur due to canonicalization. */
d42a3bae 3498 if (TARGET_PAIRED_FLOAT)
1a23970d 3499 return false;
4d588c14
RH
3500 /* SPE vector modes. */
3501 return SPE_CONST_OFFSET_OK (offset);
3502
3503 case DFmode:
4d4cbc0e
AH
3504 if (TARGET_E500_DOUBLE)
3505 return SPE_CONST_OFFSET_OK (offset);
3506
4f011e1e 3507 case DDmode:
4d588c14 3508 case DImode:
54b695e7
AH
3509 /* On e500v2, we may have:
3510
3511 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3512
3513 Which gets addressed with evldd instructions. */
3514 if (TARGET_E500_DOUBLE)
3515 return SPE_CONST_OFFSET_OK (offset);
3516
7393f7f8 3517 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3518 extra = 4;
3519 else if (offset & 3)
3520 return false;
3521 break;
3522
3523 case TFmode:
17caeff2
JM
3524 if (TARGET_E500_DOUBLE)
3525 return (SPE_CONST_OFFSET_OK (offset)
3526 && SPE_CONST_OFFSET_OK (offset + 8));
3527
4f011e1e 3528 case TDmode:
4d588c14 3529 case TImode:
7393f7f8 3530 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3531 extra = 12;
3532 else if (offset & 3)
3533 return false;
3534 else
3535 extra = 8;
3536 break;
3537
3538 default:
3539 break;
3540 }
3541
b1917422
AM
3542 offset += 0x8000;
3543 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3544}
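
[Illustration, not part of the blamed file: the closing test above biases the signed 16-bit displacement by 0x8000 so that a single unsigned compare checks both bounds; the second compare makes sure the displacement of the last word touched by a multi-word access (offset + extra) is still encodable. fits_16bit_d_form is a hypothetical stand-alone version, not part of GCC.]

int
fits_16bit_d_form (long long offset, unsigned int extra)
{
  unsigned long long biased = (unsigned long long) offset + 0x8000;
  return biased < 0x10000 && biased + extra < 0x10000;
}

/* fits_16bit_d_form (-0x8000, 0) and fits_16bit_d_form (0x7ff8, 4) hold;
   fits_16bit_d_form (0x7ffc, 12) does not, because the last word would
   need a displacement of 0x8008.  */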
3545
6fb5fa3c 3546bool
a2369ed3 3547legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3548{
3549 rtx op0, op1;
3550
3551 if (GET_CODE (x) != PLUS)
3552 return false;
850e8d3d 3553
4d588c14
RH
3554 op0 = XEXP (x, 0);
3555 op1 = XEXP (x, 1);
3556
bf00cc0f 3557 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3558 replaced with proper base and index regs. */
3559 if (!strict
3560 && reload_in_progress
3561 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3562 && REG_P (op1))
3563 return true;
3564
3565 return (REG_P (op0) && REG_P (op1)
3566 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3567 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3568 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3569 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3570}
3571
48d72335 3572inline bool
a2369ed3 3573legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3574{
3575 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3576}
3577
48d72335 3578bool
4c81e946
FJ
3579macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3580{
c4ad648e 3581 if (!TARGET_MACHO || !flag_pic
9390387d 3582 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3583 return false;
3584 x = XEXP (x, 0);
4c81e946
FJ
3585
3586 if (GET_CODE (x) != LO_SUM)
3587 return false;
3588 if (GET_CODE (XEXP (x, 0)) != REG)
3589 return false;
3590 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3591 return false;
3592 x = XEXP (x, 1);
3593
3594 return CONSTANT_P (x);
3595}
3596
4d588c14 3597static bool
a2369ed3 3598legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3599{
3600 if (GET_CODE (x) != LO_SUM)
3601 return false;
3602 if (GET_CODE (XEXP (x, 0)) != REG)
3603 return false;
3604 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3605 return false;
54b695e7 3606 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2 3607 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 3608 || mode == DDmode || mode == TDmode
17caeff2 3609 || mode == DImode))
f82f556d 3610 return false;
4d588c14
RH
3611 x = XEXP (x, 1);
3612
8622e235 3613 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3614 {
a29077da 3615 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3616 return false;
3617 if (TARGET_TOC)
3618 return false;
3619 if (GET_MODE_NUNITS (mode) != 1)
3620 return false;
5e5f01b9 3621 if (GET_MODE_BITSIZE (mode) > 64
3c028f65 3622 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
4d4447b5
PB
3623 && !(TARGET_HARD_FLOAT && TARGET_FPRS
3624 && (mode == DFmode || mode == DDmode))))
4d588c14
RH
3625 return false;
3626
3627 return CONSTANT_P (x);
3628 }
3629
3630 return false;
3631}
3632
3633
9ebbca7d
GK
3634/* Try machine-dependent ways of modifying an illegitimate address
3635 to be legitimate. If we find one, return the new, valid address.
3636 This is used from only one place: `memory_address' in explow.c.
3637
a4f6c312
SS
3638 OLDX is the address as it was before break_out_memory_refs was
3639 called. In some cases it is useful to look at this to decide what
3640 needs to be done.
9ebbca7d 3641
a4f6c312 3642 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3643
a4f6c312
SS
3644 It is always safe for this function to do nothing. It exists to
3645 recognize opportunities to optimize the output.
9ebbca7d
GK
3646
3647 On RS/6000, first check for the sum of a register with a constant
3648 integer that is out of range. If so, generate code to add the
3649 constant with the low-order 16 bits masked to the register and force
3650 this result into another register (this can be done with `cau').
3651 Then generate an address of REG+(CONST&0xffff), allowing for the
3652 possibility of bit 16 being a one.
3653
3654 Then check for the sum of a register and something not constant, try to
3655 load the other things into a register and return the sum. */
4d588c14 3656
9ebbca7d 3657rtx
a2369ed3
DJ
3658rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3659 enum machine_mode mode)
0ac081f6 3660{
c4501e62
JJ
3661 if (GET_CODE (x) == SYMBOL_REF)
3662 {
3663 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3664 if (model != 0)
3665 return rs6000_legitimize_tls_address (x, model);
3666 }
3667
f676971a 3668 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3669 && GET_CODE (XEXP (x, 0)) == REG
3670 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb
JM
3671 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
3672 && !(SPE_VECTOR_MODE (mode)
efc05e3c 3673 || ALTIVEC_VECTOR_MODE (mode)
3c1eb9eb 3674 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4f011e1e
JM
3675 || mode == DImode || mode == DDmode
3676 || mode == TDmode))))
f676971a 3677 {
9ebbca7d
GK
3678 HOST_WIDE_INT high_int, low_int;
3679 rtx sum;
a65c591c
DE
3680 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3681 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3682 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3683 GEN_INT (high_int)), 0);
3684 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3685 }
f676971a 3686 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3687 && GET_CODE (XEXP (x, 0)) == REG
3688 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3689 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
3690 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3691 || TARGET_POWERPC64
efc05e3c 3692 || ((mode != DImode && mode != DFmode && mode != DDmode)
4f011e1e 3693 || (TARGET_E500_DOUBLE && mode != DDmode)))
9ebbca7d 3694 && (TARGET_POWERPC64 || mode != DImode)
efc05e3c
PB
3695 && mode != TImode
3696 && mode != TFmode
3697 && mode != TDmode)
9ebbca7d
GK
3698 {
3699 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3700 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3701 }
0ac081f6
AH
3702 else if (ALTIVEC_VECTOR_MODE (mode))
3703 {
3704 rtx reg;
3705
3706 /* Make sure both operands are registers. */
3707 if (GET_CODE (x) == PLUS)
9f85ed45 3708 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3709 force_reg (Pmode, XEXP (x, 1)));
3710
3711 reg = force_reg (Pmode, x);
3712 return reg;
3713 }
4d4cbc0e 3714 else if (SPE_VECTOR_MODE (mode)
17caeff2 3715 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3716 || mode == DDmode || mode == TDmode
54b695e7 3717 || mode == DImode)))
a3170dc6 3718 {
54b695e7
AH
3719 if (mode == DImode)
3720 return NULL_RTX;
a3170dc6
AH
3721 /* We accept [reg + reg] and [reg + OFFSET]. */
3722
3723 if (GET_CODE (x) == PLUS)
61dd226f
NF
3724 {
3725 rtx op1 = XEXP (x, 0);
3726 rtx op2 = XEXP (x, 1);
3727 rtx y;
3728
3729 op1 = force_reg (Pmode, op1);
3730
3731 if (GET_CODE (op2) != REG
3732 && (GET_CODE (op2) != CONST_INT
3733 || !SPE_CONST_OFFSET_OK (INTVAL (op2))
3734 || (GET_MODE_SIZE (mode) > 8
3735 && !SPE_CONST_OFFSET_OK (INTVAL (op2) + 8))))
3736 op2 = force_reg (Pmode, op2);
3737
3738 /* We can't always do [reg + reg] for these, because [reg +
3739 reg + offset] is not a legitimate addressing mode. */
3740 y = gen_rtx_PLUS (Pmode, op1, op2);
3741
4f011e1e 3742 if ((GET_MODE_SIZE (mode) > 8 || mode == DDmode) && REG_P (op2))
61dd226f
NF
3743 return force_reg (Pmode, y);
3744 else
3745 return y;
3746 }
a3170dc6
AH
3747
3748 return force_reg (Pmode, x);
3749 }
f1384257
AM
3750 else if (TARGET_ELF
3751 && TARGET_32BIT
3752 && TARGET_NO_TOC
3753 && ! flag_pic
9ebbca7d 3754 && GET_CODE (x) != CONST_INT
f676971a 3755 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3756 && CONSTANT_P (x)
6ac7bf2c
GK
3757 && GET_MODE_NUNITS (mode) == 1
3758 && (GET_MODE_BITSIZE (mode) <= 32
4d4447b5
PB
3759 || ((TARGET_HARD_FLOAT && TARGET_FPRS)
3760 && (mode == DFmode || mode == DDmode))))
9ebbca7d
GK
3761 {
3762 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3763 emit_insn (gen_elf_high (reg, x));
3764 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3765 }
ee890fe2
SS
3766 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3767 && ! flag_pic
ab82a49f
AP
3768#if TARGET_MACHO
3769 && ! MACHO_DYNAMIC_NO_PIC_P
3770#endif
ee890fe2 3771 && GET_CODE (x) != CONST_INT
f676971a 3772 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3773 && CONSTANT_P (x)
4d4447b5
PB
3774 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3775 || (mode != DFmode && mode != DDmode))
f676971a 3776 && mode != DImode
ee890fe2
SS
3777 && mode != TImode)
3778 {
3779 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3780 emit_insn (gen_macho_high (reg, x));
3781 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3782 }
f676971a 3783 else if (TARGET_TOC
0cdc04e8 3784 && GET_CODE (x) == SYMBOL_REF
4d588c14 3785 && constant_pool_expr_p (x)
a9098fd0 3786 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3787 {
3788 return create_TOC_reference (x);
3789 }
3790 else
3791 return NULL_RTX;
3792}
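
[Illustration, not part of the blamed file: how the low_int/high_int split in the first branch above decomposes an out-of-range displacement. Sign-extending the low 16 bits guarantees that LOW fits a D-form/addi field, and HIGH then has its low 16 bits clear so an addis can supply it. split_offset is a hypothetical helper, not part of GCC; rs6000_legitimize_reload_address additionally checks that HIGH + LOW reconstructs the value without 32-bit overflow.]

#include <assert.h>

void
split_offset (long long val, long long *high, long long *low)
{
  *low = ((val & 0xffff) ^ 0x8000) - 0x8000;  /* sign-extended low 16 bits */
  *high = val - *low;                         /* multiple of 0x10000       */

  assert (*high + *low == val);
  assert (*low >= -0x8000 && *low <= 0x7fff);
  assert ((*high & 0xffff) == 0);
}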
258bfae2 3793
fdbe66f2 3794/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3795 We need to emit DTP-relative relocations. */
3796
fdbe66f2 3797static void
c973d557
JJ
3798rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3799{
3800 switch (size)
3801 {
3802 case 4:
3803 fputs ("\t.long\t", file);
3804 break;
3805 case 8:
3806 fputs (DOUBLE_INT_ASM_OP, file);
3807 break;
3808 default:
37409796 3809 gcc_unreachable ();
c973d557
JJ
3810 }
3811 output_addr_const (file, x);
3812 fputs ("@dtprel+0x8000", file);
3813}
3814
c4501e62
JJ
3815/* Construct the SYMBOL_REF for the tls_get_addr function. */
3816
3817static GTY(()) rtx rs6000_tls_symbol;
3818static rtx
863d938c 3819rs6000_tls_get_addr (void)
c4501e62
JJ
3820{
3821 if (!rs6000_tls_symbol)
3822 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3823
3824 return rs6000_tls_symbol;
3825}
3826
3827/* Construct the SYMBOL_REF for TLS GOT references. */
3828
3829static GTY(()) rtx rs6000_got_symbol;
3830static rtx
863d938c 3831rs6000_got_sym (void)
c4501e62
JJ
3832{
3833 if (!rs6000_got_symbol)
3834 {
3835 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3836 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3837 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3838 }
c4501e62
JJ
3839
3840 return rs6000_got_symbol;
3841}
3842
3843/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3844 this (thread-local) address. */
3845
3846static rtx
a2369ed3 3847rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3848{
3849 rtx dest, insn;
3850
3851 dest = gen_reg_rtx (Pmode);
3852 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3853 {
3854 rtx tlsreg;
3855
3856 if (TARGET_64BIT)
3857 {
3858 tlsreg = gen_rtx_REG (Pmode, 13);
3859 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3860 }
3861 else
3862 {
3863 tlsreg = gen_rtx_REG (Pmode, 2);
3864 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3865 }
3866 emit_insn (insn);
3867 }
3868 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3869 {
3870 rtx tlsreg, tmp;
3871
3872 tmp = gen_reg_rtx (Pmode);
3873 if (TARGET_64BIT)
3874 {
3875 tlsreg = gen_rtx_REG (Pmode, 13);
3876 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3877 }
3878 else
3879 {
3880 tlsreg = gen_rtx_REG (Pmode, 2);
3881 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3882 }
3883 emit_insn (insn);
3884 if (TARGET_64BIT)
3885 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3886 else
3887 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3888 emit_insn (insn);
3889 }
3890 else
3891 {
3892 rtx r3, got, tga, tmp1, tmp2, eqv;
3893
4fed8f8f
AM
3894 /* We currently use relocations like @got@tlsgd for tls, which
3895 means the linker will handle allocation of tls entries, placing
3896 them in the .got section. So use a pointer to the .got section,
3897 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3898 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3899 if (TARGET_64BIT)
972f427b 3900 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3901 else
3902 {
3903 if (flag_pic == 1)
3904 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3905 else
3906 {
3907 rtx gsym = rs6000_got_sym ();
3908 got = gen_reg_rtx (Pmode);
3909 if (flag_pic == 0)
3910 rs6000_emit_move (got, gsym, Pmode);
3911 else
3912 {
e65a3857 3913 rtx tmp3, mem;
c4501e62
JJ
3914 rtx first, last;
3915
c4501e62
JJ
3916 tmp1 = gen_reg_rtx (Pmode);
3917 tmp2 = gen_reg_rtx (Pmode);
3918 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3919 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3920
e65a3857
DE
3921 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
3922 emit_move_insn (tmp1,
1de43f85 3923 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
3924 emit_move_insn (tmp2, mem);
3925 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3926 last = emit_move_insn (got, tmp3);
bd94cb6e 3927 set_unique_reg_note (last, REG_EQUAL, gsym);
c4501e62
JJ
3928 }
3929 }
3930 }
3931
3932 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3933 {
3934 r3 = gen_rtx_REG (Pmode, 3);
02135bc1
SB
3935 tga = rs6000_tls_get_addr ();
3936
3937 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
3938 insn = gen_tls_gd_aix64 (r3, got, addr, tga, const0_rtx);
3939 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
3940 insn = gen_tls_gd_aix32 (r3, got, addr, tga, const0_rtx);
3941 else if (DEFAULT_ABI == ABI_V4)
3942 insn = gen_tls_gd_sysvsi (r3, got, addr, tga, const0_rtx);
c4501e62 3943 else
02135bc1
SB
3944 gcc_unreachable ();
3945
c4501e62 3946 start_sequence ();
c4501e62 3947 insn = emit_call_insn (insn);
becfd6e5 3948 RTL_CONST_CALL_P (insn) = 1;
c4501e62
JJ
3949 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3950 insn = get_insns ();
3951 end_sequence ();
3952 emit_libcall_block (insn, dest, r3, addr);
3953 }
3954 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3955 {
3956 r3 = gen_rtx_REG (Pmode, 3);
02135bc1
SB
3957 tga = rs6000_tls_get_addr ();
3958
3959 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
3960 insn = gen_tls_ld_aix64 (r3, got, tga, const0_rtx);
3961 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
3962 insn = gen_tls_ld_aix32 (r3, got, tga, const0_rtx);
3963 else if (DEFAULT_ABI == ABI_V4)
3964 insn = gen_tls_ld_sysvsi (r3, got, tga, const0_rtx);
c4501e62 3965 else
02135bc1
SB
3966 gcc_unreachable ();
3967
c4501e62 3968 start_sequence ();
c4501e62 3969 insn = emit_call_insn (insn);
becfd6e5 3970 RTL_CONST_CALL_P (insn) = 1;
c4501e62
JJ
3971 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3972 insn = get_insns ();
3973 end_sequence ();
3974 tmp1 = gen_reg_rtx (Pmode);
3975 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3976 UNSPEC_TLSLD);
3977 emit_libcall_block (insn, tmp1, r3, eqv);
3978 if (rs6000_tls_size == 16)
3979 {
3980 if (TARGET_64BIT)
3981 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3982 else
3983 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3984 }
3985 else if (rs6000_tls_size == 32)
3986 {
3987 tmp2 = gen_reg_rtx (Pmode);
3988 if (TARGET_64BIT)
3989 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3990 else
3991 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3992 emit_insn (insn);
3993 if (TARGET_64BIT)
3994 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3995 else
3996 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3997 }
3998 else
3999 {
4000 tmp2 = gen_reg_rtx (Pmode);
4001 if (TARGET_64BIT)
4002 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
4003 else
4004 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
4005 emit_insn (insn);
4006 insn = gen_rtx_SET (Pmode, dest,
4007 gen_rtx_PLUS (Pmode, tmp2, tmp1));
4008 }
4009 emit_insn (insn);
4010 }
4011 else
4012 {
a7b376ee 4013 /* IE, or 64-bit offset LE. */
c4501e62
JJ
4014 tmp2 = gen_reg_rtx (Pmode);
4015 if (TARGET_64BIT)
4016 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
4017 else
4018 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
4019 emit_insn (insn);
4020 if (TARGET_64BIT)
4021 insn = gen_tls_tls_64 (dest, tmp2, addr);
4022 else
4023 insn = gen_tls_tls_32 (dest, tmp2, addr);
4024 emit_insn (insn);
4025 }
4026 }
4027
4028 return dest;
4029}
4030
c4501e62
JJ
4031/* Return 1 if X contains a thread-local symbol. */
4032
4033bool
a2369ed3 4034rs6000_tls_referenced_p (rtx x)
c4501e62 4035{
cd413cab
AP
4036 if (! TARGET_HAVE_TLS)
4037 return false;
4038
c4501e62
JJ
4039 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
4040}
4041
4042/* Return 1 if *X is a thread-local symbol. This is the same as
4043 rs6000_tls_symbol_ref except for the type of the unused argument. */
4044
9390387d 4045static int
a2369ed3 4046rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
4047{
4048 return RS6000_SYMBOL_REF_TLS_P (*x);
4049}
4050
24ea750e
DJ
4051/* The convention appears to be to define this wherever it is used.
4052 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
4053 is now used here. */
4054#ifndef REG_MODE_OK_FOR_BASE_P
4055#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
4056#endif
4057
4058/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
4059 replace the input X, or the original X if no replacement is called for.
4060 The output parameter *WIN is 1 if the calling macro should goto WIN,
4061 0 if it should not.
4062
4063 For RS/6000, we wish to handle large displacements off a base
 4064 register by splitting the addend across an addi/addis and the mem insn.
4065 This cuts number of extra insns needed from 3 to 1.
4066
4067 On Darwin, we use this to generate code for floating point constants.
4068 A movsf_low is generated so we wind up with 2 instructions rather than 3.
4069 The Darwin code is inside #if TARGET_MACHO because only then is
4070 machopic_function_base_name() defined. */
4071rtx
f676971a 4072rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
4073 int opnum, int type,
4074 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 4075{
f676971a 4076 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
4077 if (GET_CODE (x) == PLUS
4078 && GET_CODE (XEXP (x, 0)) == PLUS
4079 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4080 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4081 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4082 {
4083 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4084 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4085 opnum, (enum reload_type)type);
24ea750e
DJ
4086 *win = 1;
4087 return x;
4088 }
3deb2758 4089
24ea750e
DJ
4090#if TARGET_MACHO
4091 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
4092 && GET_CODE (x) == LO_SUM
4093 && GET_CODE (XEXP (x, 0)) == PLUS
4094 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
4095 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
4096 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
4097 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
4098 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
4099 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
4100 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
4101 {
4102 /* Result of previous invocation of this function on Darwin
6f317ef3 4103 floating point constant. */
24ea750e 4104 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4105 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4106 opnum, (enum reload_type)type);
24ea750e
DJ
4107 *win = 1;
4108 return x;
4109 }
4110#endif
4937d02d
DE
4111
4112 /* Force ld/std non-word aligned offset into base register by wrapping
4113 in offset 0. */
4114 if (GET_CODE (x) == PLUS
4115 && GET_CODE (XEXP (x, 0)) == REG
4116 && REGNO (XEXP (x, 0)) < 32
4117 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
4118 && GET_CODE (XEXP (x, 1)) == CONST_INT
4119 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 4120 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
4121 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
4122 && TARGET_POWERPC64)
4123 {
4124 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
4125 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4126 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4127 opnum, (enum reload_type) type);
4128 *win = 1;
4129 return x;
4130 }
4131
24ea750e
DJ
4132 if (GET_CODE (x) == PLUS
4133 && GET_CODE (XEXP (x, 0)) == REG
4134 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
4135 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 4136 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 4137 && !SPE_VECTOR_MODE (mode)
17caeff2 4138 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 4139 || mode == DDmode || mode == TDmode
54b695e7 4140 || mode == DImode))
78c875e8 4141 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
4142 {
4143 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
4144 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
4145 HOST_WIDE_INT high
c4ad648e 4146 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
4147
4148 /* Check for 32-bit overflow. */
4149 if (high + low != val)
c4ad648e 4150 {
24ea750e
DJ
4151 *win = 0;
4152 return x;
4153 }
4154
4155 /* Reload the high part into a base reg; leave the low part
c4ad648e 4156 in the mem directly. */
24ea750e
DJ
4157
4158 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
4159 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4160 GEN_INT (high)),
4161 GEN_INT (low));
24ea750e
DJ
4162
4163 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4164 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4165 opnum, (enum reload_type)type);
24ea750e
DJ
4166 *win = 1;
4167 return x;
4168 }
4937d02d 4169
24ea750e 4170 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 4171 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 4172 && !SPE_VECTOR_MODE (mode)
8308679f
DE
4173#if TARGET_MACHO
4174 && DEFAULT_ABI == ABI_DARWIN
a29077da 4175 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
4176#else
4177 && DEFAULT_ABI == ABI_V4
4178 && !flag_pic
4179#endif
7393f7f8 4180 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
4d4447b5 4181 The same goes for DImode without 64-bit gprs and DFmode and DDmode
7b5d92b2 4182 without fprs. */
0d8c1c97 4183 && mode != TFmode
7393f7f8 4184 && mode != TDmode
7b5d92b2 4185 && (mode != DImode || TARGET_POWERPC64)
4d4447b5 4186 && ((mode != DFmode && mode != DDmode) || TARGET_POWERPC64
7b5d92b2 4187 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
24ea750e 4188 {
8308679f 4189#if TARGET_MACHO
a29077da
GK
4190 if (flag_pic)
4191 {
4192 rtx offset = gen_rtx_CONST (Pmode,
4193 gen_rtx_MINUS (Pmode, x,
11abc112 4194 machopic_function_base_sym ()));
a29077da
GK
4195 x = gen_rtx_LO_SUM (GET_MODE (x),
4196 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4197 gen_rtx_HIGH (Pmode, offset)), offset);
4198 }
4199 else
8308679f 4200#endif
a29077da 4201 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 4202 gen_rtx_HIGH (Pmode, x), x);
a29077da 4203
24ea750e 4204 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
4205 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4206 opnum, (enum reload_type)type);
24ea750e
DJ
4207 *win = 1;
4208 return x;
4209 }
4937d02d 4210
dec1f3aa
DE
4211 /* Reload an offset address wrapped by an AND that represents the
4212 masking of the lower bits. Strip the outer AND and let reload
4213 convert the offset address into an indirect address. */
4214 if (TARGET_ALTIVEC
4215 && ALTIVEC_VECTOR_MODE (mode)
4216 && GET_CODE (x) == AND
4217 && GET_CODE (XEXP (x, 0)) == PLUS
4218 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4219 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4220 && GET_CODE (XEXP (x, 1)) == CONST_INT
4221 && INTVAL (XEXP (x, 1)) == -16)
4222 {
4223 x = XEXP (x, 0);
4224 *win = 1;
4225 return x;
4226 }
4227
24ea750e 4228 if (TARGET_TOC
0cdc04e8 4229 && GET_CODE (x) == SYMBOL_REF
4d588c14 4230 && constant_pool_expr_p (x)
c1f11548 4231 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 4232 {
194c524a 4233 x = create_TOC_reference (x);
24ea750e
DJ
4234 *win = 1;
4235 return x;
4236 }
4237 *win = 0;
4238 return x;
f676971a 4239}
24ea750e 4240
258bfae2
FS
4241/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
4242 that is a valid memory address for an instruction.
4243 The MODE argument is the machine mode for the MEM expression
4244 that wants to use this address.
4245
 4246 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
4247 refers to a constant pool entry of an address (or the sum of it
4248 plus a constant), a short (16-bit signed) constant plus a register,
4249 the sum of two registers, or a register indirect, possibly with an
4d4447b5
PB
4250 auto-increment. For DFmode, DDmode and DImode with a constant plus
4251 register, we must ensure that both words are addressable or PowerPC64
4252 with offset word aligned.
258bfae2 4253
4d4447b5 4254 For modes spanning multiple registers (DFmode and DDmode in 32-bit GPRs,
7393f7f8
BE
4255 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4256 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
4257 during assembly output. */
4258int
a2369ed3 4259rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 4260{
850e8d3d
DN
4261 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4262 if (TARGET_ALTIVEC
4263 && ALTIVEC_VECTOR_MODE (mode)
4264 && GET_CODE (x) == AND
4265 && GET_CODE (XEXP (x, 1)) == CONST_INT
4266 && INTVAL (XEXP (x, 1)) == -16)
4267 x = XEXP (x, 0);
4268
c4501e62
JJ
4269 if (RS6000_SYMBOL_REF_TLS_P (x))
4270 return 0;
4d588c14 4271 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
4272 return 1;
4273 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4274 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4275 && !SPE_VECTOR_MODE (mode)
429ec7dc 4276 && mode != TFmode
7393f7f8 4277 && mode != TDmode
54b695e7 4278 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4279 && !(TARGET_E500_DOUBLE
4280 && (mode == DFmode || mode == DDmode || mode == DImode))
258bfae2 4281 && TARGET_UPDATE
4d588c14 4282 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4283 return 1;
d04b6e6e 4284 if (legitimate_small_data_p (mode, x))
258bfae2 4285 return 1;
4d588c14 4286 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
4287 return 1;
4288 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4289 if (! reg_ok_strict
4290 && GET_CODE (x) == PLUS
4291 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4292 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4293 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
4294 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4295 return 1;
76d2b81d 4296 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4297 return 1;
4298 if (mode != TImode
76d2b81d 4299 && mode != TFmode
7393f7f8 4300 && mode != TDmode
a3170dc6
AH
4301 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4302 || TARGET_POWERPC64
4f011e1e
JM
4303 || (mode != DFmode && mode != DDmode)
4304 || (TARGET_E500_DOUBLE && mode != DDmode))
258bfae2 4305 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 4306 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4307 return 1;
6fb5fa3c
DB
4308 if (GET_CODE (x) == PRE_MODIFY
4309 && mode != TImode
4310 && mode != TFmode
4311 && mode != TDmode
4312 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4313 || TARGET_POWERPC64
4d4447b5 4314 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
6fb5fa3c
DB
4315 && (TARGET_POWERPC64 || mode != DImode)
4316 && !ALTIVEC_VECTOR_MODE (mode)
4317 && !SPE_VECTOR_MODE (mode)
4318 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4319 && !(TARGET_E500_DOUBLE
4320 && (mode == DFmode || mode == DDmode || mode == DImode))
6fb5fa3c
DB
4321 && TARGET_UPDATE
4322 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4323 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
4324 || legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
4325 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4326 return 1;
4d588c14 4327 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4328 return 1;
4329 return 0;
4330}
4d588c14
RH
4331
4332/* Go to LABEL if ADDR (a legitimate address expression)
4333 has an effect that depends on the machine mode it is used for.
4334
4335 On the RS/6000 this is true of all integral offsets (since AltiVec
4336 modes don't allow them) or is a pre-increment or decrement.
4337
4338 ??? Except that due to conceptual problems in offsettable_address_p
4339 we can't really report the problems of integral offsets. So leave
f676971a 4340 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4341 sub-words of a TFmode operand, which is what we had before. */
4342
4343bool
a2369ed3 4344rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4345{
4346 switch (GET_CODE (addr))
4347 {
4348 case PLUS:
4349 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4350 {
4351 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4352 return val + 12 + 0x8000 >= 0x10000;
4353 }
4354 break;
4355
4356 case LO_SUM:
4357 return true;
4358
619fe064 4359 /* Auto-increment cases are now treated generically in recog.c. */
6fb5fa3c
DB
4360 case PRE_MODIFY:
4361 return TARGET_UPDATE;
4d588c14
RH
4362
4363 default:
4364 break;
4365 }
4366
4367 return false;
4368}
d8ecbcdb 4369
d04b6e6e
EB
4370/* More elaborate version of recog's offsettable_memref_p predicate
4371 that works around the ??? note of rs6000_mode_dependent_address.
4372 In particular it accepts
4373
4374 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4375
 4376 in 32-bit mode, which the recog predicate rejects. */
4377
4378bool
4379rs6000_offsettable_memref_p (rtx op)
4380{
4381 if (!MEM_P (op))
4382 return false;
4383
4384 /* First mimic offsettable_memref_p. */
4385 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4386 return true;
4387
4388 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4389 the latter predicate knows nothing about the mode of the memory
4390 reference and, therefore, assumes that it is the largest supported
4391 mode (TFmode). As a consequence, legitimate offsettable memory
4392 references are rejected. rs6000_legitimate_offset_address_p contains
4393 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4394 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4395}
4396
d8ecbcdb
AH
4397/* Return number of consecutive hard regs needed starting at reg REGNO
4398 to hold something of mode MODE.
4399 This is ordinarily the length in words of a value of mode MODE
4400 but can be less for certain modes in special long registers.
4401
4402 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4403 scalar instructions. The upper 32 bits are only available to the
4404 SIMD instructions.
4405
4406 POWER and PowerPC GPRs hold 32 bits worth;
 4407 PowerPC64 GPRs and FPRs hold 64 bits worth. */
4408
4409int
4410rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4411{
4412 if (FP_REGNO_P (regno))
4413 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4414
4415 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4416 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4417
4418 if (ALTIVEC_REGNO_P (regno))
4419 return
4420 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4421
8521c414
JM
4422 /* The value returned for SCmode in the E500 double case is 2 for
4423 ABI compatibility; storing an SCmode value in a single register
4424 would require function_arg and rs6000_spe_function_arg to handle
4425 SCmode so as to pass the value correctly in a pair of
4426 registers. */
4f011e1e
JM
4427 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode
4428 && !DECIMAL_FLOAT_MODE_P (mode))
8521c414
JM
4429 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4430
d8ecbcdb
AH
4431 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4432}
2aa4498c
AH
4433
4434/* Change register usage conditional on target flags. */
4435void
4436rs6000_conditional_register_usage (void)
4437{
4438 int i;
4439
4440 /* Set MQ register fixed (already call_used) if not POWER
4441 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4442 be allocated. */
4443 if (! TARGET_POWER)
4444 fixed_regs[64] = 1;
4445
7c9ac5c0 4446 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4447 if (TARGET_64BIT)
4448 fixed_regs[13] = call_used_regs[13]
4449 = call_really_used_regs[13] = 1;
4450
4451 /* Conditionally disable FPRs. */
4452 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4453 for (i = 32; i < 64; i++)
4454 fixed_regs[i] = call_used_regs[i]
c4ad648e 4455 = call_really_used_regs[i] = 1;
2aa4498c 4456
7c9ac5c0
PH
4457 /* The TOC register is not killed across calls in a way that is
4458 visible to the compiler. */
4459 if (DEFAULT_ABI == ABI_AIX)
4460 call_really_used_regs[2] = 0;
4461
2aa4498c
AH
4462 if (DEFAULT_ABI == ABI_V4
4463 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4464 && flag_pic == 2)
4465 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4466
4467 if (DEFAULT_ABI == ABI_V4
4468 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4469 && flag_pic == 1)
4470 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4471 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4472 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4473
4474 if (DEFAULT_ABI == ABI_DARWIN
4475 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4476 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4477 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4478 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4479
b4db40bf
JJ
4480 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4481 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4482 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4483
2aa4498c
AH
4484 if (TARGET_SPE)
4485 {
4486 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4487 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4488 registers in prologues and epilogues. We no longer use r14
4489 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4490 pool for link-compatibility with older versions of GCC. Once
4491 "old" code has died out, we can return r14 to the allocation
4492 pool. */
4493 fixed_regs[14]
4494 = call_used_regs[14]
4495 = call_really_used_regs[14] = 1;
2aa4498c
AH
4496 }
4497
0db747be 4498 if (!TARGET_ALTIVEC)
2aa4498c
AH
4499 {
4500 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4501 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4502 call_really_used_regs[VRSAVE_REGNO] = 1;
4503 }
4504
0db747be
DE
4505 if (TARGET_ALTIVEC)
4506 global_regs[VSCR_REGNO] = 1;
4507
2aa4498c 4508 if (TARGET_ALTIVEC_ABI)
0db747be
DE
4509 {
4510 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4511 call_used_regs[i] = call_really_used_regs[i] = 1;
4512
4513 /* AIX reserves VR20:31 in non-extended ABI mode. */
4514 if (TARGET_XCOFF)
4515 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
4516 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4517 }
2aa4498c 4518}
fb4d4348 4519\f
a4f6c312
SS
4520/* Try to output insns to set TARGET equal to the constant C if it can
4521 be done in less than N insns. Do all computations in MODE.
4522 Returns the place where the output has been placed if it can be
4523 done and the insns have been emitted. If it would take more than N
 4524 insns, zero is returned and no insns are emitted. */
2bfcf297
DB
4525
4526rtx
f676971a 4527rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4528 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4529{
af8cb5c5 4530 rtx result, insn, set;
2bfcf297
DB
4531 HOST_WIDE_INT c0, c1;
4532
37409796 4533 switch (mode)
2bfcf297 4534 {
37409796
NS
4535 case QImode:
4536 case HImode:
2bfcf297 4537 if (dest == NULL)
c4ad648e 4538 dest = gen_reg_rtx (mode);
2bfcf297
DB
4539 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4540 return dest;
bb8df8a6 4541
37409796 4542 case SImode:
b3a13419 4543 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4544
d448860e 4545 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4546 GEN_INT (INTVAL (source)
4547 & (~ (HOST_WIDE_INT) 0xffff))));
4548 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4549 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4550 GEN_INT (INTVAL (source) & 0xffff))));
4551 result = dest;
37409796
NS
4552 break;
4553
4554 case DImode:
4555 switch (GET_CODE (source))
af8cb5c5 4556 {
37409796 4557 case CONST_INT:
af8cb5c5
DE
4558 c0 = INTVAL (source);
4559 c1 = -(c0 < 0);
37409796 4560 break;
bb8df8a6 4561
37409796 4562 case CONST_DOUBLE:
2bfcf297 4563#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4564 c0 = CONST_DOUBLE_LOW (source);
4565 c1 = -(c0 < 0);
2bfcf297 4566#else
af8cb5c5
DE
4567 c0 = CONST_DOUBLE_LOW (source);
4568 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4569#endif
37409796
NS
4570 break;
4571
4572 default:
4573 gcc_unreachable ();
af8cb5c5 4574 }
af8cb5c5
DE
4575
4576 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4577 break;
4578
4579 default:
4580 gcc_unreachable ();
2bfcf297 4581 }
2bfcf297 4582
af8cb5c5
DE
4583 insn = get_last_insn ();
4584 set = single_set (insn);
4585 if (! CONSTANT_P (SET_SRC (set)))
4586 set_unique_reg_note (insn, REG_EQUAL, source);
4587
4588 return result;
2bfcf297
DB
4589}
4590
4591/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4592 fall back to a straight forward decomposition. We do this to avoid
4593 exponential run times encountered when looking for longer sequences
4594 with rs6000_emit_set_const. */
4595static rtx
a2369ed3 4596rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4597{
4598 if (!TARGET_POWERPC64)
4599 {
4600 rtx operand1, operand2;
4601
4602 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4603 DImode);
d448860e 4604 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4605 DImode);
4606 emit_move_insn (operand1, GEN_INT (c1));
4607 emit_move_insn (operand2, GEN_INT (c2));
4608 }
4609 else
4610 {
bc06712d 4611 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4612
bc06712d 4613 ud1 = c1 & 0xffff;
f921c9c9 4614 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4615#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4616 c2 = c1 >> 32;
2bfcf297 4617#endif
bc06712d 4618 ud3 = c2 & 0xffff;
f921c9c9 4619 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4620
f676971a 4621 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4622 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4623 {
bc06712d 4624 if (ud1 & 0x8000)
b78d48dd 4625 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4626 else
4627 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4628 }
2bfcf297 4629
f676971a 4630 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4631 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4632 {
bc06712d 4633 if (ud2 & 0x8000)
f676971a 4634 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4635 - 0x80000000));
252b88f7 4636 else
bc06712d
TR
4637 emit_move_insn (dest, GEN_INT (ud2 << 16));
4638 if (ud1 != 0)
d448860e
JH
4639 emit_move_insn (copy_rtx (dest),
4640 gen_rtx_IOR (DImode, copy_rtx (dest),
4641 GEN_INT (ud1)));
252b88f7 4642 }
f676971a 4643 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4644 || (ud4 == 0 && ! (ud3 & 0x8000)))
4645 {
4646 if (ud3 & 0x8000)
f676971a 4647 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4648 - 0x80000000));
4649 else
4650 emit_move_insn (dest, GEN_INT (ud3 << 16));
4651
4652 if (ud2 != 0)
d448860e
JH
4653 emit_move_insn (copy_rtx (dest),
4654 gen_rtx_IOR (DImode, copy_rtx (dest),
4655 GEN_INT (ud2)));
4656 emit_move_insn (copy_rtx (dest),
4657 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4658 GEN_INT (16)));
bc06712d 4659 if (ud1 != 0)
d448860e
JH
4660 emit_move_insn (copy_rtx (dest),
4661 gen_rtx_IOR (DImode, copy_rtx (dest),
4662 GEN_INT (ud1)));
bc06712d 4663 }
f676971a 4664 else
bc06712d
TR
4665 {
4666 if (ud4 & 0x8000)
f676971a 4667 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4668 - 0x80000000));
4669 else
4670 emit_move_insn (dest, GEN_INT (ud4 << 16));
4671
4672 if (ud3 != 0)
d448860e
JH
4673 emit_move_insn (copy_rtx (dest),
4674 gen_rtx_IOR (DImode, copy_rtx (dest),
4675 GEN_INT (ud3)));
2bfcf297 4676
d448860e
JH
4677 emit_move_insn (copy_rtx (dest),
4678 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4679 GEN_INT (32)));
bc06712d 4680 if (ud2 != 0)
d448860e
JH
4681 emit_move_insn (copy_rtx (dest),
4682 gen_rtx_IOR (DImode, copy_rtx (dest),
4683 GEN_INT (ud2 << 16)));
bc06712d 4684 if (ud1 != 0)
d448860e
JH
4685 emit_move_insn (copy_rtx (dest),
4686 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4687 }
4688 }
2bfcf297
DB
4689 return dest;
4690}
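
[Illustration, not part of the blamed file: the general (final else) case above splits the constant into four 16-bit chunks ud4..ud1 and rebuilds it with a lis, an ori, a 32-bit shift, an oris and an ori. A host-side sketch of that recombination, assuming a 64-bit unsigned long long; check_set_long_const is a hypothetical helper, not part of GCC.]

#include <assert.h>

void
check_set_long_const (unsigned long long c)
{
  unsigned long long ud1 = c & 0xffff;
  unsigned long long ud2 = (c >> 16) & 0xffff;
  unsigned long long ud3 = (c >> 32) & 0xffff;
  unsigned long long ud4 = (c >> 48) & 0xffff;

  unsigned long long dest = ud4 << 16;  /* lis           */
  dest |= ud3;                          /* ori           */
  dest <<= 32;                          /* sldi dest,32  */
  dest |= ud2 << 16;                    /* oris          */
  dest |= ud1;                          /* ori           */

  assert (dest == c);
}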
4691
76d2b81d 4692/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4693 in cases where it won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4694
4695static void
4696rs6000_eliminate_indexed_memrefs (rtx operands[2])
4697{
4698 if (GET_CODE (operands[0]) == MEM
4699 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4700 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4701 && ! reload_in_progress)
4702 operands[0]
4703 = replace_equiv_address (operands[0],
4704 copy_addr_to_reg (XEXP (operands[0], 0)));
4705
4706 if (GET_CODE (operands[1]) == MEM
4707 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4708 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4709 && ! reload_in_progress)
4710 operands[1]
4711 = replace_equiv_address (operands[1],
4712 copy_addr_to_reg (XEXP (operands[1], 0)));
4713}
4714
fb4d4348
GK
4715/* Emit a move from SOURCE to DEST in mode MODE. */
4716void
a2369ed3 4717rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4718{
4719 rtx operands[2];
4720 operands[0] = dest;
4721 operands[1] = source;
f676971a 4722
fb4d4348
GK
4723 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4724 if (GET_CODE (operands[1]) == CONST_DOUBLE
4725 && ! FLOAT_MODE_P (mode)
4726 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4727 {
4728 /* FIXME. This should never happen. */
4729 /* Since it seems that it does, do the safe thing and convert
4730 to a CONST_INT. */
2496c7bd 4731 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4732 }
37409796
NS
4733 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4734 || FLOAT_MODE_P (mode)
4735 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4736 || CONST_DOUBLE_LOW (operands[1]) < 0)
4737 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4738 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4739
c9e8cb32
DD
4740 /* Check if GCC is setting up a block move that will end up using FP
4741 registers as temporaries. We must make sure this is acceptable. */
4742 if (GET_CODE (operands[0]) == MEM
4743 && GET_CODE (operands[1]) == MEM
4744 && mode == DImode
41543739
GK
4745 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4746 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4747 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4748 ? 32 : MEM_ALIGN (operands[0])))
4749 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4750 ? 32
41543739
GK
4751 : MEM_ALIGN (operands[1]))))
4752 && ! MEM_VOLATILE_P (operands [0])
4753 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4754 {
41543739
GK
4755 emit_move_insn (adjust_address (operands[0], SImode, 0),
4756 adjust_address (operands[1], SImode, 0));
d448860e
JH
4757 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4758 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4759 return;
4760 }
630d42a0 4761
b3a13419 4762 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4763 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4764 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4765
a3170dc6
AH
4766 if (mode == SFmode && ! TARGET_POWERPC
4767 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 4768 && GET_CODE (operands[0]) == MEM)
fb4d4348 4769 {
ffc14f31
GK
4770 int regnum;
4771
4772 if (reload_in_progress || reload_completed)
4773 regnum = true_regnum (operands[1]);
4774 else if (GET_CODE (operands[1]) == REG)
4775 regnum = REGNO (operands[1]);
4776 else
4777 regnum = -1;
f676971a 4778
fb4d4348
GK
4779 /* If operands[1] is a register, on POWER it may have
4780 double-precision data in it, so truncate it to single
4781 precision. */
4782 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4783 {
4784 rtx newreg;
b3a13419 4785 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4786 : gen_reg_rtx (mode));
fb4d4348
GK
4787 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4788 operands[1] = newreg;
4789 }
4790 }
4791
c4501e62
JJ
4792 /* Recognize the case where operand[1] is a reference to thread-local
4793 data and load its address to a register. */
84f52ebd 4794 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4795 {
84f52ebd
RH
4796 enum tls_model model;
4797 rtx tmp = operands[1];
4798 rtx addend = NULL;
4799
4800 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4801 {
4802 addend = XEXP (XEXP (tmp, 0), 1);
4803 tmp = XEXP (XEXP (tmp, 0), 0);
4804 }
4805
4806 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4807 model = SYMBOL_REF_TLS_MODEL (tmp);
4808 gcc_assert (model != 0);
4809
4810 tmp = rs6000_legitimize_tls_address (tmp, model);
4811 if (addend)
4812 {
4813 tmp = gen_rtx_PLUS (mode, tmp, addend);
4814 tmp = force_operand (tmp, operands[0]);
4815 }
4816 operands[1] = tmp;
c4501e62
JJ
4817 }
4818
8f4e6caf
RH
4819 /* Handle the case where reload calls us with an invalid address. */
4820 if (reload_in_progress && mode == Pmode
69ef87e2 4821 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4822 || ! nonimmediate_operand (operands[0], mode)))
4823 goto emit_set;
4824
a9baceb1
GK
4825 /* 128-bit constant floating-point values on Darwin should really be
4826 loaded as two parts. */
8521c414 4827 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
4828 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4829 {
4830 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4831 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
4832 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
4833 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
4834 simplify_gen_subreg (imode, operands[1], mode, 0),
4835 imode);
4836 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
4837 GET_MODE_SIZE (imode)),
4838 simplify_gen_subreg (imode, operands[1], mode,
4839 GET_MODE_SIZE (imode)),
4840 imode);
a9baceb1
GK
4841 return;
4842 }
4843
e41b2a33
PB
4844 if (reload_in_progress && cfun->machine->sdmode_stack_slot != NULL_RTX)
4845 cfun->machine->sdmode_stack_slot =
4846 eliminate_regs (cfun->machine->sdmode_stack_slot, VOIDmode, NULL_RTX);
4847
4848 if (reload_in_progress
4849 && mode == SDmode
4850 && MEM_P (operands[0])
4851 && rtx_equal_p (operands[0], cfun->machine->sdmode_stack_slot)
4852 && REG_P (operands[1]))
4853 {
4854 if (FP_REGNO_P (REGNO (operands[1])))
4855 {
4856 rtx mem = adjust_address_nv (operands[0], DDmode, 0);
4857 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4858 emit_insn (gen_movsd_store (mem, operands[1]));
4859 }
4860 else if (INT_REGNO_P (REGNO (operands[1])))
4861 {
4862 rtx mem = adjust_address_nv (operands[0], mode, 4);
4863 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4864 emit_insn (gen_movsd_hardfloat (mem, operands[1]));
4865 }
4866 else
4867 gcc_unreachable();
4868 return;
4869 }
4870 if (reload_in_progress
4871 && mode == SDmode
4872 && REG_P (operands[0])
4873 && MEM_P (operands[1])
4874 && rtx_equal_p (operands[1], cfun->machine->sdmode_stack_slot))
4875 {
4876 if (FP_REGNO_P (REGNO (operands[0])))
4877 {
4878 rtx mem = adjust_address_nv (operands[1], DDmode, 0);
4879 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4880 emit_insn (gen_movsd_load (operands[0], mem));
4881 }
4882 else if (INT_REGNO_P (REGNO (operands[0])))
4883 {
4884 rtx mem = adjust_address_nv (operands[1], mode, 4);
4885 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4886 emit_insn (gen_movsd_hardfloat (operands[0], mem));
4887 }
4888 else
4889 gcc_unreachable();
4890 return;
4891 }
4892
fb4d4348
GK
4893 /* FIXME: In the long term, this switch statement should go away
4894 and be replaced by a sequence of tests based on things like
4895 mode == Pmode. */
4896 switch (mode)
4897 {
4898 case HImode:
4899 case QImode:
4900 if (CONSTANT_P (operands[1])
4901 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 4902 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
4903 break;
4904
06f4e019 4905 case TFmode:
7393f7f8 4906 case TDmode:
76d2b81d
DJ
4907 rs6000_eliminate_indexed_memrefs (operands);
4908 /* fall through */
4909
fb4d4348 4910 case DFmode:
7393f7f8 4911 case DDmode:
fb4d4348 4912 case SFmode:
e41b2a33 4913 case SDmode:
f676971a 4914 if (CONSTANT_P (operands[1])
fb4d4348 4915 && ! easy_fp_constant (operands[1], mode))
a9098fd0 4916 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4917 break;
f676971a 4918
0ac081f6
AH
4919 case V16QImode:
4920 case V8HImode:
4921 case V4SFmode:
4922 case V4SImode:
a3170dc6
AH
4923 case V4HImode:
4924 case V2SFmode:
4925 case V2SImode:
00a892b8 4926 case V1DImode:
69ef87e2 4927 if (CONSTANT_P (operands[1])
d744e06e 4928 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
4929 operands[1] = force_const_mem (mode, operands[1]);
4930 break;
f676971a 4931
fb4d4348 4932 case SImode:
a9098fd0 4933 case DImode:
fb4d4348
GK
4934 /* Use default pattern for address of ELF small data. */
4935 if (TARGET_ELF
a9098fd0 4936 && mode == Pmode
f607bc57 4937 && DEFAULT_ABI == ABI_V4
f676971a 4938 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
4939 || GET_CODE (operands[1]) == CONST)
4940 && small_data_operand (operands[1], mode))
fb4d4348
GK
4941 {
4942 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4943 return;
4944 }
4945
f607bc57 4946 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
4947 && mode == Pmode && mode == SImode
4948 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
4949 {
4950 emit_insn (gen_movsi_got (operands[0], operands[1]));
4951 return;
4952 }
4953
ee890fe2 4954 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
4955 && TARGET_NO_TOC
4956 && ! flag_pic
a9098fd0 4957 && mode == Pmode
fb4d4348
GK
4958 && CONSTANT_P (operands[1])
4959 && GET_CODE (operands[1]) != HIGH
4960 && GET_CODE (operands[1]) != CONST_INT)
4961 {
b3a13419
ILT
4962 rtx target = (!can_create_pseudo_p ()
4963 ? operands[0]
4964 : gen_reg_rtx (mode));
fb4d4348
GK
4965
4966 /* If this is a function address on -mcall-aixdesc,
4967 convert it to the address of the descriptor. */
4968 if (DEFAULT_ABI == ABI_AIX
4969 && GET_CODE (operands[1]) == SYMBOL_REF
4970 && XSTR (operands[1], 0)[0] == '.')
4971 {
4972 const char *name = XSTR (operands[1], 0);
4973 rtx new_ref;
4974 while (*name == '.')
4975 name++;
4976 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4977 CONSTANT_POOL_ADDRESS_P (new_ref)
4978 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 4979 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 4980 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 4981 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
4982 operands[1] = new_ref;
4983 }
7509c759 4984
ee890fe2
SS
4985 if (DEFAULT_ABI == ABI_DARWIN)
4986 {
ab82a49f
AP
4987#if TARGET_MACHO
4988 if (MACHO_DYNAMIC_NO_PIC_P)
4989 {
4990 /* Take care of any required data indirection. */
4991 operands[1] = rs6000_machopic_legitimize_pic_address (
4992 operands[1], mode, operands[0]);
4993 if (operands[0] != operands[1])
4994 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 4995 operands[0], operands[1]));
ab82a49f
AP
4996 return;
4997 }
4998#endif
b8a55285
AP
4999 emit_insn (gen_macho_high (target, operands[1]));
5000 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
5001 return;
5002 }
5003
fb4d4348
GK
5004 emit_insn (gen_elf_high (target, operands[1]));
5005 emit_insn (gen_elf_low (operands[0], target, operands[1]));
5006 return;
5007 }
5008
a9098fd0
GK
5009 /* If this is a SYMBOL_REF that refers to a constant pool entry,
5010 and we have put it in the TOC, we just need to make a TOC-relative
5011 reference to it. */
5012 if (TARGET_TOC
5013 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 5014 && constant_pool_expr_p (operands[1])
a9098fd0
GK
5015 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
5016 get_pool_mode (operands[1])))
fb4d4348 5017 {
a9098fd0 5018 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 5019 }
a9098fd0
GK
5020 else if (mode == Pmode
5021 && CONSTANT_P (operands[1])
38886f37
AO
5022 && ((GET_CODE (operands[1]) != CONST_INT
5023 && ! easy_fp_constant (operands[1], mode))
5024 || (GET_CODE (operands[1]) == CONST_INT
5025 && num_insns_constant (operands[1], mode) > 2)
5026 || (GET_CODE (operands[0]) == REG
5027 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 5028 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
5029 && ! legitimate_constant_pool_address_p (operands[1])
5030 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
5031 {
5032 /* Emit a USE operation so that the constant isn't deleted if
5033 expensive optimizations are turned on because nobody
5034 references it. This should only be done for operands that
5035 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
5036 This should not be done for operands that contain LABEL_REFs.
5037 For now, we just handle the obvious case. */
5038 if (GET_CODE (operands[1]) != LABEL_REF)
c41c1387 5039 emit_use (operands[1]);
fb4d4348 5040
c859cda6 5041#if TARGET_MACHO
ee890fe2 5042 /* Darwin uses a special PIC legitimizer. */
ab82a49f 5043 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 5044 {
ee890fe2
SS
5045 operands[1] =
5046 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
5047 operands[0]);
5048 if (operands[0] != operands[1])
5049 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
5050 return;
5051 }
c859cda6 5052#endif
ee890fe2 5053
fb4d4348
GK
5054 /* If we are to limit the number of things we put in the TOC and
5055 this is a symbol plus a constant we can add in one insn,
5056 just put the symbol in the TOC and add the constant. Don't do
5057 this if reload is in progress. */
5058 if (GET_CODE (operands[1]) == CONST
5059 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
5060 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 5061 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
5062 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
5063 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
5064 && ! side_effects_p (operands[0]))
5065 {
a4f6c312
SS
5066 rtx sym =
5067 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
5068 rtx other = XEXP (XEXP (operands[1], 0), 1);
5069
a9098fd0
GK
5070 sym = force_reg (mode, sym);
5071 if (mode == SImode)
5072 emit_insn (gen_addsi3 (operands[0], sym, other));
5073 else
5074 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
5075 return;
5076 }
5077
a9098fd0 5078 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5079
f676971a 5080 if (TARGET_TOC
0cdc04e8 5081 && GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
4d588c14 5082 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
5083 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
5084 get_pool_constant (XEXP (operands[1], 0)),
5085 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 5086 {
ba4828e0 5087 operands[1]
542a8afa 5088 = gen_const_mem (mode,
c4ad648e 5089 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 5090 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 5091 }
fb4d4348
GK
5092 }
5093 break;
a9098fd0 5094
fb4d4348 5095 case TImode:
76d2b81d
DJ
5096 rs6000_eliminate_indexed_memrefs (operands);
5097
27dc0551
DE
5098 if (TARGET_POWER)
5099 {
5100 emit_insn (gen_rtx_PARALLEL (VOIDmode,
5101 gen_rtvec (2,
5102 gen_rtx_SET (VOIDmode,
5103 operands[0], operands[1]),
5104 gen_rtx_CLOBBER (VOIDmode,
5105 gen_rtx_SCRATCH (SImode)))));
5106 return;
5107 }
fb4d4348
GK
5108 break;
5109
5110 default:
37409796 5111 gcc_unreachable ();
fb4d4348
GK
5112 }
5113
a9098fd0
GK
5114 /* Above, we may have called force_const_mem which may have returned
5115 an invalid address. If we can, fix this up; otherwise, reload will
5116 have to deal with it. */
8f4e6caf
RH
5117 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
5118 operands[1] = validize_mem (operands[1]);
a9098fd0 5119
8f4e6caf 5120 emit_set:
fb4d4348
GK
5121 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5122}
4697a36c 5123\f
2858f73a
GK
5124/* Nonzero if we can use a floating-point register to pass this arg. */
5125#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 5126 (SCALAR_FLOAT_MODE_P (MODE) \
2858f73a
GK
5127 && (CUM)->fregno <= FP_ARG_MAX_REG \
5128 && TARGET_HARD_FLOAT && TARGET_FPRS)
5129
5130/* Nonzero if we can use an AltiVec register to pass this arg. */
5131#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
5132 (ALTIVEC_VECTOR_MODE (MODE) \
5133 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
5134 && TARGET_ALTIVEC_ABI \
83953138 5135 && (NAMED))
2858f73a 5136
c6e8c921
GK
5137/* Return a nonzero value to say to return the function value in
5138 memory, just as large structures are always returned. TYPE will be
5139 the data type of the value, and FNTYPE will be the type of the
5140 function doing the returning, or @code{NULL} for libcalls.
5141
5142 The AIX ABI for the RS/6000 specifies that all structures are
5143 returned in memory. The Darwin ABI does the same. The SVR4 ABI
5144 specifies that structures <= 8 bytes are returned in r3/r4, but a
5145 draft put them in memory, and GCC used to implement the draft
df01da37 5146 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
5147 controls this instead of DEFAULT_ABI; V.4 targets needing backward
5148 compatibility can change DRAFT_V4_STRUCT_RET to override the
5149 default, and -m switches get the final word. See
5150 rs6000_override_options for more details.
5151
5152 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
5153 long double support is enabled. These values are returned in memory.
5154
5155 int_size_in_bytes returns -1 for variable size objects, which go in
5156 memory always. The cast to unsigned makes -1 > 8. */
5157
5158static bool
586de218 5159rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 5160{
594a51fe
SS
5161 /* In the darwin64 ABI, try to use registers for larger structs
5162 if possible. */
0b5383eb 5163 if (rs6000_darwin64_abi
594a51fe 5164 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
5165 && int_size_in_bytes (type) > 0)
5166 {
5167 CUMULATIVE_ARGS valcum;
5168 rtx valret;
5169
5170 valcum.words = 0;
5171 valcum.fregno = FP_ARG_MIN_REG;
5172 valcum.vregno = ALTIVEC_ARG_MIN_REG;
5173 /* Do a trial code generation as if this were going to be passed
5174 as an argument; if any part goes in memory, we return NULL. */
5175 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
5176 if (valret)
5177 return false;
5178 /* Otherwise fall through to more conventional ABI rules. */
5179 }
594a51fe 5180
c6e8c921 5181 if (AGGREGATE_TYPE_P (type)
df01da37 5182 && (aix_struct_return
c6e8c921
GK
5183 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
5184 return true;
b693336b 5185
bada2eb8
DE
5186 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5187 modes only exist for GCC vector types if -maltivec. */
5188 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
5189 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5190 return false;
5191
b693336b
PB
5192 /* Return synthetic vectors in memory. */
5193 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5194 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5195 {
5196 static bool warned_for_return_big_vectors = false;
5197 if (!warned_for_return_big_vectors)
5198 {
d4ee4d25 5199 warning (0, "GCC vector returned by reference: "
b693336b
PB
5200 "non-standard ABI extension with no compatibility guarantee");
5201 warned_for_return_big_vectors = true;
5202 }
5203 return true;
5204 }
5205
602ea4d3 5206 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 5207 return true;
ad630bef 5208
c6e8c921
GK
5209 return false;
5210}
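/* Example of the size test above (illustrative only): given

       struct small { int x; };            4 bytes
       struct big   { int a, b, c; };      12 bytes

   "small" is returned in memory when aix_struct_return is set (the AIX and
   Darwin default) but in r3 under the SVR4 rules, while "big" exceeds the
   8-byte limit, so (unsigned HOST_WIDE_INT) 12 > 8 sends it to memory
   under either convention.  */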
5211
4697a36c
MM
5212/* Initialize a variable CUM of type CUMULATIVE_ARGS
5213 for a call to a function whose data type is FNTYPE.
5214 For a library call, FNTYPE is 0.
5215
5216 For incoming args we set the number of arguments in the prototype large
1c20ae99 5217 so we never return a PARALLEL. */
4697a36c
MM
5218
5219void
f676971a 5220init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
5221 rtx libname ATTRIBUTE_UNUSED, int incoming,
5222 int libcall, int n_named_args)
4697a36c
MM
5223{
5224 static CUMULATIVE_ARGS zero_cumulative;
5225
5226 *cum = zero_cumulative;
5227 cum->words = 0;
5228 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 5229 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 5230 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
5231 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5232 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 5233 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
5234 cum->stdarg = fntype
5235 && (TYPE_ARG_TYPES (fntype) != 0
5236 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5237 != void_type_node));
4697a36c 5238
0f6937fe
AM
5239 cum->nargs_prototype = 0;
5240 if (incoming || cum->prototype)
5241 cum->nargs_prototype = n_named_args;
4697a36c 5242
a5c76ee6 5243 /* Check for a longcall attribute. */
3eb4e360
AM
5244 if ((!fntype && rs6000_default_long_calls)
5245 || (fntype
5246 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5247 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5248 cum->call_cookie |= CALL_LONG;
6a4cee5f 5249
4697a36c
MM
5250 if (TARGET_DEBUG_ARG)
5251 {
5252 fprintf (stderr, "\ninit_cumulative_args:");
5253 if (fntype)
5254 {
5255 tree ret_type = TREE_TYPE (fntype);
5256 fprintf (stderr, " ret code = %s,",
5257 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5258 }
5259
6a4cee5f
MM
5260 if (cum->call_cookie & CALL_LONG)
5261 fprintf (stderr, " longcall,");
5262
4697a36c
MM
5263 fprintf (stderr, " proto = %d, nargs = %d\n",
5264 cum->prototype, cum->nargs_prototype);
5265 }
f676971a 5266
c4ad648e
AM
5267 if (fntype
5268 && !TARGET_ALTIVEC
5269 && TARGET_ALTIVEC_ABI
5270 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5271 {
c85ce869 5272 error ("cannot return value in vector register because"
c4ad648e 5273 " altivec instructions are disabled, use -maltivec"
c85ce869 5274 " to enable them");
c4ad648e 5275 }
4697a36c
MM
5276}
5277\f
fe984136
RH
5278/* Return true if TYPE must be passed on the stack and not in registers. */
5279
5280static bool
586de218 5281rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
5282{
5283 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5284 return must_pass_in_stack_var_size (mode, type);
5285 else
5286 return must_pass_in_stack_var_size_or_pad (mode, type);
5287}
5288
c229cba9
DE
5289/* If defined, a C expression which determines whether, and in which
5290 direction, to pad out an argument with extra space. The value
5291 should be of type `enum direction': either `upward' to pad above
5292 the argument, `downward' to pad below, or `none' to inhibit
5293 padding.
5294
5295 For the AIX ABI structs are always stored left shifted in their
5296 argument slot. */
5297
9ebbca7d 5298enum direction
586de218 5299function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 5300{
6e985040
AM
5301#ifndef AGGREGATE_PADDING_FIXED
5302#define AGGREGATE_PADDING_FIXED 0
5303#endif
5304#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5305#define AGGREGATES_PAD_UPWARD_ALWAYS 0
5306#endif
5307
5308 if (!AGGREGATE_PADDING_FIXED)
5309 {
5310 /* GCC used to pass structures of the same size as integer types as
5311 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 5312 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
5313 passed padded downward, except that -mstrict-align further
5314 muddied the water in that multi-component structures of 2 and 4
5315 bytes in size were passed padded upward.
5316
5317 The following arranges for best compatibility with previous
5318 versions of gcc, but removes the -mstrict-align dependency. */
5319 if (BYTES_BIG_ENDIAN)
5320 {
5321 HOST_WIDE_INT size = 0;
5322
5323 if (mode == BLKmode)
5324 {
5325 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5326 size = int_size_in_bytes (type);
5327 }
5328 else
5329 size = GET_MODE_SIZE (mode);
5330
5331 if (size == 1 || size == 2 || size == 4)
5332 return downward;
5333 }
5334 return upward;
5335 }
5336
5337 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5338 {
5339 if (type != 0 && AGGREGATE_TYPE_P (type))
5340 return upward;
5341 }
c229cba9 5342
d3704c46
KH
5343 /* Fall back to the default. */
5344 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
5345}
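/* Illustrative examples of the rules above on a big-endian target with the
   default AGGREGATE_PADDING_FIXED == 0: a 2-byte struct is padded downward,
   a 3-byte packed struct (BLKmode, size not 1, 2 or 4) is padded upward,
   and an 8-byte struct is padded upward as well.  */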
5346
b6c9286a 5347/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 5348 of an argument with the specified mode and type. If it is not defined,
b6c9286a 5349 PARM_BOUNDARY is used for all arguments.
f676971a 5350
84e9ad15
AM
5351 V.4 wants long longs and doubles to be double word aligned. Just
5352 testing the mode size is a boneheaded way to do this as it means
5353 that other types such as complex int are also double word aligned.
5354 However, we're stuck with this because changing the ABI might break
5355 existing library interfaces.
5356
b693336b
PB
5357 Doubleword align SPE vectors.
5358 Quadword align Altivec vectors.
5359 Quadword align large synthetic vector types. */
b6c9286a
MM
5360
5361int
b693336b 5362function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5363{
84e9ad15
AM
5364 if (DEFAULT_ABI == ABI_V4
5365 && (GET_MODE_SIZE (mode) == 8
5366 || (TARGET_HARD_FLOAT
5367 && TARGET_FPRS
7393f7f8 5368 && (mode == TFmode || mode == TDmode))))
4ed78545 5369 return 64;
ad630bef
DE
5370 else if (SPE_VECTOR_MODE (mode)
5371 || (type && TREE_CODE (type) == VECTOR_TYPE
5372 && int_size_in_bytes (type) >= 8
5373 && int_size_in_bytes (type) < 16))
e1f83b4d 5374 return 64;
ad630bef
DE
5375 else if (ALTIVEC_VECTOR_MODE (mode)
5376 || (type && TREE_CODE (type) == VECTOR_TYPE
5377 && int_size_in_bytes (type) >= 16))
0ac081f6 5378 return 128;
0b5383eb
DJ
5379 else if (rs6000_darwin64_abi && mode == BLKmode
5380 && type && TYPE_ALIGN (type) > 64)
5381 return 128;
9ebbca7d 5382 else
b6c9286a 5383 return PARM_BOUNDARY;
b6c9286a 5384}
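/* Examples (illustrative): under the V.4 ABI a DFmode argument reports a
   64-bit boundary, a V4SImode AltiVec argument reports 128, and a plain
   SImode argument falls through to PARM_BOUNDARY (normally 32 bits on
   32-bit targets).  */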
c53bdcf5 5385
294bd182
AM
5386/* For a function parm of MODE and TYPE, return the starting word in
5387 the parameter area. NWORDS of the parameter area are already used. */
5388
5389static unsigned int
5390rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5391{
5392 unsigned int align;
5393 unsigned int parm_offset;
5394
5395 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5396 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5397 return nwords + (-(parm_offset + nwords) & align);
5398}
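/* Worked example (assuming 32-bit V.4, so parm_offset is 2 and
   PARM_BOUNDARY is 32): for a doubleword-aligned DFmode argument with one
   word of the parameter area already used, align = 64/32 - 1 = 1 and the
   result is

       1 + (-(2 + 1) & 1) = 2

   i.e. one word of padding is skipped so that parm_offset + start is a
   multiple of two words and the argument lands doubleword aligned.  */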
5399
c53bdcf5
AM
5400/* Compute the size (in words) of a function argument. */
5401
5402static unsigned long
5403rs6000_arg_size (enum machine_mode mode, tree type)
5404{
5405 unsigned long size;
5406
5407 if (mode != BLKmode)
5408 size = GET_MODE_SIZE (mode);
5409 else
5410 size = int_size_in_bytes (type);
5411
5412 if (TARGET_32BIT)
5413 return (size + 3) >> 2;
5414 else
5415 return (size + 7) >> 3;
5416}
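/* Examples (illustrative): a 10-byte BLKmode struct occupies
   (10 + 3) >> 2 = 3 words when TARGET_32BIT and (10 + 7) >> 3 = 2
   doublewords otherwise; a DFmode argument is 2 words on 32-bit targets
   and 1 on 64-bit targets.  */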
b6c9286a 5417\f
0b5383eb 5418/* Use this to flush pending int fields. */
594a51fe
SS
5419
5420static void
0b5383eb
DJ
5421rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5422 HOST_WIDE_INT bitpos)
594a51fe 5423{
0b5383eb
DJ
5424 unsigned int startbit, endbit;
5425 int intregs, intoffset;
5426 enum machine_mode mode;
594a51fe 5427
0b5383eb
DJ
5428 if (cum->intoffset == -1)
5429 return;
594a51fe 5430
0b5383eb
DJ
5431 intoffset = cum->intoffset;
5432 cum->intoffset = -1;
5433
5434 if (intoffset % BITS_PER_WORD != 0)
5435 {
5436 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5437 MODE_INT, 0);
5438 if (mode == BLKmode)
594a51fe 5439 {
0b5383eb
DJ
5440 /* We couldn't find an appropriate mode, which happens,
5441 e.g., in packed structs when there are 3 bytes to load.
5442 Move intoffset back to the beginning of the word in this
5443 case. */
5444 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5445 }
594a51fe 5446 }
0b5383eb
DJ
5447
5448 startbit = intoffset & -BITS_PER_WORD;
5449 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5450 intregs = (endbit - startbit) / BITS_PER_WORD;
5451 cum->words += intregs;
5452}
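/* Worked example (darwin64, so BITS_PER_WORD is 64): if integer fields
   starting at bit 32 are pending (cum->intoffset == 32) and the next
   non-integer field begins at bit 128, then startbit = 0, endbit = 128
   and intregs = 2, so two GPR-sized words are charged to cum->words.  */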
5453
5454/* The darwin64 ABI calls for us to recurse down through structs,
5455 looking for elements passed in registers. Unfortunately, we have
5456 to track int register count here also because of misalignments
5457 in powerpc alignment mode. */
5458
5459static void
5460rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5461 tree type,
5462 HOST_WIDE_INT startbitpos)
5463{
5464 tree f;
5465
5466 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5467 if (TREE_CODE (f) == FIELD_DECL)
5468 {
5469 HOST_WIDE_INT bitpos = startbitpos;
5470 tree ftype = TREE_TYPE (f);
70fb00df
AP
5471 enum machine_mode mode;
5472 if (ftype == error_mark_node)
5473 continue;
5474 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5475
5476 if (DECL_SIZE (f) != 0
5477 && host_integerp (bit_position (f), 1))
5478 bitpos += int_bit_position (f);
5479
5480 /* ??? FIXME: else assume zero offset. */
5481
5482 if (TREE_CODE (ftype) == RECORD_TYPE)
5483 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5484 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5485 {
5486 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5487 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5488 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5489 }
5490 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5491 {
5492 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5493 cum->vregno++;
5494 cum->words += 2;
5495 }
5496 else if (cum->intoffset == -1)
5497 cum->intoffset = bitpos;
5498 }
594a51fe
SS
5499}
5500
4697a36c
MM
5501/* Update the data in CUM to advance over an argument
5502 of mode MODE and data type TYPE.
b2d04ecf
AM
5503 (TYPE is null for libcalls where that information may not be available.)
5504
5505 Note that for args passed by reference, function_arg will be called
5506 with MODE and TYPE set to that of the pointer to the arg, not the arg
5507 itself. */
4697a36c
MM
5508
5509void
f676971a 5510function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5511 tree type, int named, int depth)
4697a36c 5512{
0b5383eb
DJ
5513 int size;
5514
594a51fe
SS
5515 /* Only tick off an argument if we're not recursing. */
5516 if (depth == 0)
5517 cum->nargs_prototype--;
4697a36c 5518
ad630bef
DE
5519 if (TARGET_ALTIVEC_ABI
5520 && (ALTIVEC_VECTOR_MODE (mode)
5521 || (type && TREE_CODE (type) == VECTOR_TYPE
5522 && int_size_in_bytes (type) == 16)))
0ac081f6 5523 {
4ed78545
AM
5524 bool stack = false;
5525
2858f73a 5526 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5527 {
6d0ef01e
HP
5528 cum->vregno++;
5529 if (!TARGET_ALTIVEC)
c85ce869 5530 error ("cannot pass argument in vector register because"
6d0ef01e 5531 " altivec instructions are disabled, use -maltivec"
c85ce869 5532 " to enable them");
4ed78545
AM
5533
5534 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5535 even if it is going to be passed in a vector register.
4ed78545
AM
5536 Darwin does the same for variable-argument functions. */
5537 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5538 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5539 stack = true;
6d0ef01e 5540 }
4ed78545
AM
5541 else
5542 stack = true;
5543
5544 if (stack)
c4ad648e 5545 {
a594a19c 5546 int align;
f676971a 5547
2858f73a
GK
5548 /* Vector parameters must be 16-byte aligned. This places
5549 them at 2 mod 4 in terms of words in 32-bit mode, since
5550 the parameter save area starts at offset 24 from the
5551 stack. In 64-bit mode, they just have to start on an
5552 even word, since the parameter save area is 16-byte
5553 aligned. Space for GPRs is reserved even if the argument
5554 will be passed in memory. */
5555 if (TARGET_32BIT)
4ed78545 5556 align = (2 - cum->words) & 3;
2858f73a
GK
5557 else
5558 align = cum->words & 1;
c53bdcf5 5559 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5560
a594a19c
GK
5561 if (TARGET_DEBUG_ARG)
5562 {
f676971a 5563 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5564 cum->words, align);
5565 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5566 cum->nargs_prototype, cum->prototype,
2858f73a 5567 GET_MODE_NAME (mode));
a594a19c
GK
5568 }
5569 }
0ac081f6 5570 }
a4b0320c 5571 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5572 && !cum->stdarg
5573 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5574 cum->sysv_gregno++;
594a51fe
SS
5575
5576 else if (rs6000_darwin64_abi
5577 && mode == BLKmode
0b5383eb
DJ
5578 && TREE_CODE (type) == RECORD_TYPE
5579 && (size = int_size_in_bytes (type)) > 0)
5580 {
5581 /* Variable sized types have size == -1 and are
5582 treated as if consisting entirely of ints.
5583 Pad to 16 byte boundary if needed. */
5584 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5585 && (cum->words % 2) != 0)
5586 cum->words++;
5587 /* For varargs, we can just go up by the size of the struct. */
5588 if (!named)
5589 cum->words += (size + 7) / 8;
5590 else
5591 {
5592 /* It is tempting to say int register count just goes up by
5593 sizeof(type)/8, but this is wrong in a case such as
5594 { int; double; int; } [powerpc alignment]. We have to
5595 grovel through the fields for these too. */
5596 cum->intoffset = 0;
5597 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5598 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5599 size * BITS_PER_UNIT);
5600 }
5601 }
f607bc57 5602 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5603 {
a3170dc6 5604 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5605 && (mode == SFmode || mode == DFmode
e41b2a33 5606 || mode == SDmode || mode == DDmode || mode == TDmode
602ea4d3 5607 || (mode == TFmode && !TARGET_IEEEQUAD)))
4697a36c 5608 {
2d83f070
JJ
5609 /* _Decimal128 must use an even/odd register pair. This assumes
5610 that the register number is odd when fregno is odd. */
5611 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5612 cum->fregno++;
5613
5614 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5615 <= FP_ARG_V4_MAX_REG)
602ea4d3 5616 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5617 else
5618 {
602ea4d3 5619 cum->fregno = FP_ARG_V4_MAX_REG + 1;
4d4447b5
PB
5620 if (mode == DFmode || mode == TFmode
5621 || mode == DDmode || mode == TDmode)
c4ad648e 5622 cum->words += cum->words & 1;
c53bdcf5 5623 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5624 }
4697a36c 5625 }
4cc833b7
RH
5626 else
5627 {
b2d04ecf 5628 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5629 int gregno = cum->sysv_gregno;
5630
4ed78545
AM
5631 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5632 (r7,r8) or (r9,r10). As does any other 2 word item such
5633 as complex int due to a historical mistake. */
5634 if (n_words == 2)
5635 gregno += (1 - gregno) & 1;
4cc833b7 5636
4ed78545 5637 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5638 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5639 {
4ed78545
AM
5640 /* Long long and SPE vectors are aligned on the stack.
5641 So are other 2 word items such as complex int due to
5642 a historical mistake. */
4cc833b7
RH
5643 if (n_words == 2)
5644 cum->words += cum->words & 1;
5645 cum->words += n_words;
5646 }
4697a36c 5647
4cc833b7
RH
5648 /* Note: continuing to accumulate gregno past when we've started
5649 spilling to the stack indicates the fact that we've started
5650 spilling to the stack to expand_builtin_saveregs. */
5651 cum->sysv_gregno = gregno + n_words;
5652 }
4697a36c 5653
4cc833b7
RH
5654 if (TARGET_DEBUG_ARG)
5655 {
5656 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5657 cum->words, cum->fregno);
5658 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5659 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5660 fprintf (stderr, "mode = %4s, named = %d\n",
5661 GET_MODE_NAME (mode), named);
5662 }
4697a36c
MM
5663 }
5664 else
4cc833b7 5665 {
b2d04ecf 5666 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5667 int start_words = cum->words;
5668 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5669
294bd182 5670 cum->words = align_words + n_words;
4697a36c 5671
ebb109ad 5672 if (SCALAR_FLOAT_MODE_P (mode)
a3170dc6 5673 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5674 {
5675 /* _Decimal128 must be passed in an even/odd float register pair.
5676 This assumes that the register number is odd when fregno is
5677 odd. */
5678 if (mode == TDmode && (cum->fregno % 2) == 1)
5679 cum->fregno++;
5680 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5681 }
4cc833b7
RH
5682
5683 if (TARGET_DEBUG_ARG)
5684 {
5685 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5686 cum->words, cum->fregno);
5687 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5688 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5689 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5690 named, align_words - start_words, depth);
4cc833b7
RH
5691 }
5692 }
4697a36c 5693}
a6c9bed4 5694
f82f556d
AH
5695static rtx
5696spe_build_register_parallel (enum machine_mode mode, int gregno)
5697{
17caeff2 5698 rtx r1, r3, r5, r7;
f82f556d 5699
37409796 5700 switch (mode)
f82f556d 5701 {
37409796 5702 case DFmode:
54b695e7
AH
5703 r1 = gen_rtx_REG (DImode, gregno);
5704 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5705 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5706
5707 case DCmode:
17caeff2 5708 case TFmode:
54b695e7
AH
5709 r1 = gen_rtx_REG (DImode, gregno);
5710 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5711 r3 = gen_rtx_REG (DImode, gregno + 2);
5712 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5713 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5714
17caeff2
JM
5715 case TCmode:
5716 r1 = gen_rtx_REG (DImode, gregno);
5717 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5718 r3 = gen_rtx_REG (DImode, gregno + 2);
5719 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5720 r5 = gen_rtx_REG (DImode, gregno + 4);
5721 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5722 r7 = gen_rtx_REG (DImode, gregno + 6);
5723 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5724 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5725
37409796
NS
5726 default:
5727 gcc_unreachable ();
f82f556d 5728 }
f82f556d 5729}
b78d48dd 5730
f82f556d 5731/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5732static rtx
f676971a 5733rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5734 tree type)
a6c9bed4 5735{
f82f556d
AH
5736 int gregno = cum->sysv_gregno;
5737
5738 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5739 are passed and returned in a pair of GPRs for ABI compatibility. */
4d4447b5 5740 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 5741 || mode == DCmode || mode == TCmode))
f82f556d 5742 {
b5870bee
AH
5743 int n_words = rs6000_arg_size (mode, type);
5744
f82f556d 5745 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4f011e1e 5746 if (mode == DFmode)
b5870bee 5747 gregno += (1 - gregno) & 1;
f82f556d 5748
b5870bee
AH
5749 /* Multi-reg args are not split between registers and stack. */
5750 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5751 return NULL_RTX;
5752
5753 return spe_build_register_parallel (mode, gregno);
5754 }
a6c9bed4
AH
5755 if (cum->stdarg)
5756 {
c53bdcf5 5757 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5758
5759 /* SPE vectors are put in odd registers. */
5760 if (n_words == 2 && (gregno & 1) == 0)
5761 gregno += 1;
5762
5763 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5764 {
5765 rtx r1, r2;
5766 enum machine_mode m = SImode;
5767
5768 r1 = gen_rtx_REG (m, gregno);
5769 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5770 r2 = gen_rtx_REG (m, gregno + 1);
5771 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5772 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5773 }
5774 else
b78d48dd 5775 return NULL_RTX;
a6c9bed4
AH
5776 }
5777 else
5778 {
f82f556d
AH
5779 if (gregno <= GP_ARG_MAX_REG)
5780 return gen_rtx_REG (mode, gregno);
a6c9bed4 5781 else
b78d48dd 5782 return NULL_RTX;
a6c9bed4
AH
5783 }
5784}
5785
0b5383eb
DJ
5786/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5787 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5788
0b5383eb 5789static void
bb8df8a6 5790rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5791 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5792{
0b5383eb
DJ
5793 enum machine_mode mode;
5794 unsigned int regno;
5795 unsigned int startbit, endbit;
5796 int this_regno, intregs, intoffset;
5797 rtx reg;
594a51fe 5798
0b5383eb
DJ
5799 if (cum->intoffset == -1)
5800 return;
5801
5802 intoffset = cum->intoffset;
5803 cum->intoffset = -1;
5804
5805 /* If this is the trailing part of a word, try to only load that
5806 much into the register. Otherwise load the whole register. Note
5807 that in the latter case we may pick up unwanted bits. It's not a
5808 problem at the moment, but we may wish to revisit it. */
5809
5810 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5811 {
0b5383eb
DJ
5812 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5813 MODE_INT, 0);
5814 if (mode == BLKmode)
5815 {
5816 /* We couldn't find an appropriate mode, which happens,
5817 e.g., in packed structs when there are 3 bytes to load.
5818 Move intoffset back to the beginning of the word in this
5819 case. */
5820 intoffset = intoffset & -BITS_PER_WORD;
5821 mode = word_mode;
5822 }
5823 }
5824 else
5825 mode = word_mode;
5826
5827 startbit = intoffset & -BITS_PER_WORD;
5828 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5829 intregs = (endbit - startbit) / BITS_PER_WORD;
5830 this_regno = cum->words + intoffset / BITS_PER_WORD;
5831
5832 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
5833 cum->use_stack = 1;
bb8df8a6 5834
0b5383eb
DJ
5835 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
5836 if (intregs <= 0)
5837 return;
5838
5839 intoffset /= BITS_PER_UNIT;
5840 do
5841 {
5842 regno = GP_ARG_MIN_REG + this_regno;
5843 reg = gen_rtx_REG (mode, regno);
5844 rvec[(*k)++] =
5845 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5846
5847 this_regno += 1;
5848 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
5849 mode = word_mode;
5850 intregs -= 1;
5851 }
5852 while (intregs > 0);
5853}
5854
5855/* Recursive workhorse for rs6000_darwin64_record_arg below. */
5856
5857static void
586de218 5858rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
5859 HOST_WIDE_INT startbitpos, rtx rvec[],
5860 int *k)
5861{
5862 tree f;
5863
5864 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5865 if (TREE_CODE (f) == FIELD_DECL)
5866 {
5867 HOST_WIDE_INT bitpos = startbitpos;
5868 tree ftype = TREE_TYPE (f);
70fb00df
AP
5869 enum machine_mode mode;
5870 if (ftype == error_mark_node)
5871 continue;
5872 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5873
5874 if (DECL_SIZE (f) != 0
5875 && host_integerp (bit_position (f), 1))
5876 bitpos += int_bit_position (f);
5877
5878 /* ??? FIXME: else assume zero offset. */
5879
5880 if (TREE_CODE (ftype) == RECORD_TYPE)
5881 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5882 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 5883 {
0b5383eb
DJ
5884#if 0
5885 switch (mode)
594a51fe 5886 {
0b5383eb
DJ
5887 case SCmode: mode = SFmode; break;
5888 case DCmode: mode = DFmode; break;
5889 case TCmode: mode = TFmode; break;
5890 default: break;
594a51fe 5891 }
0b5383eb
DJ
5892#endif
5893 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5894 rvec[(*k)++]
bb8df8a6 5895 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
5896 gen_rtx_REG (mode, cum->fregno++),
5897 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 5898 if (mode == TFmode || mode == TDmode)
0b5383eb 5899 cum->fregno++;
594a51fe 5900 }
0b5383eb
DJ
5901 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5902 {
5903 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5904 rvec[(*k)++]
bb8df8a6
EC
5905 = gen_rtx_EXPR_LIST (VOIDmode,
5906 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
5907 GEN_INT (bitpos / BITS_PER_UNIT));
5908 }
5909 else if (cum->intoffset == -1)
5910 cum->intoffset = bitpos;
5911 }
5912}
594a51fe 5913
0b5383eb
DJ
5914/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5915 the register(s) to be used for each field and subfield of a struct
5916 being passed by value, along with the offset of where the
5917 register's value may be found in the block. FP fields go in FP
5918 register, vector fields go in vector registers, and everything
bb8df8a6 5919 else goes in int registers, packed as in memory.
8ff40a74 5920
0b5383eb
DJ
5921 This code is also used for function return values. RETVAL indicates
5922 whether this is the case.
8ff40a74 5923
a4d05547 5924 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 5925 calling convention. */
594a51fe 5926
0b5383eb 5927static rtx
586de218 5928rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
5929 int named, bool retval)
5930{
5931 rtx rvec[FIRST_PSEUDO_REGISTER];
5932 int k = 1, kbase = 1;
5933 HOST_WIDE_INT typesize = int_size_in_bytes (type);
5934 /* This is a copy; modifications are not visible to our caller. */
5935 CUMULATIVE_ARGS copy_cum = *orig_cum;
5936 CUMULATIVE_ARGS *cum = &copy_cum;
5937
5938 /* Pad to 16 byte boundary if needed. */
5939 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5940 && (cum->words % 2) != 0)
5941 cum->words++;
5942
5943 cum->intoffset = 0;
5944 cum->use_stack = 0;
5945 cum->named = named;
5946
5947 /* Put entries into rvec[] for individual FP and vector fields, and
5948 for the chunks of memory that go in int regs. Note we start at
5949 element 1; 0 is reserved for an indication of using memory, and
5950 may or may not be filled in below. */
5951 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
5952 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
5953
5954 /* If any part of the struct went on the stack put all of it there.
5955 This hack is because the generic code for
5956 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
5957 parts of the struct are not at the beginning. */
5958 if (cum->use_stack)
5959 {
5960 if (retval)
5961 return NULL_RTX; /* doesn't go in registers at all */
5962 kbase = 0;
5963 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5964 }
5965 if (k > 1 || cum->use_stack)
5966 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
5967 else
5968 return NULL_RTX;
5969}
5970
b78d48dd
FJ
5971/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
5972
5973static rtx
ec6376ab 5974rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 5975{
ec6376ab
AM
5976 int n_units;
5977 int i, k;
5978 rtx rvec[GP_ARG_NUM_REG + 1];
5979
5980 if (align_words >= GP_ARG_NUM_REG)
5981 return NULL_RTX;
5982
5983 n_units = rs6000_arg_size (mode, type);
5984
5985 /* Optimize the simple case where the arg fits in one gpr, except in
5986 the case of BLKmode due to assign_parms assuming that registers are
5987 BITS_PER_WORD wide. */
5988 if (n_units == 0
5989 || (n_units == 1 && mode != BLKmode))
5990 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5991
5992 k = 0;
5993 if (align_words + n_units > GP_ARG_NUM_REG)
5994 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5995 using a magic NULL_RTX component.
79773478
AM
5996 This is not strictly correct. Only some of the arg belongs in
5997 memory, not all of it. However, the normal scheme using
5998 function_arg_partial_nregs can result in unusual subregs, e.g.
5999 (subreg:SI (reg:DF) 4), which are not handled well. The code to
6000 store the whole arg to memory is often more efficient than code
6001 to store pieces, and we know that space is available in the right
6002 place for the whole arg. */
ec6376ab
AM
6003 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6004
6005 i = 0;
6006 do
36a454e1 6007 {
ec6376ab
AM
6008 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
6009 rtx off = GEN_INT (i++ * 4);
6010 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 6011 }
ec6376ab
AM
6012 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
6013
6014 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
6015}
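/* Illustrative example (assuming the usual r3..r10 GPR argument registers,
   i.e. GP_ARG_MIN_REG == 3 and GP_ARG_NUM_REG == 8): a DImode argument
   arriving at align_words == 7 has n_units == 2, so it straddles the
   register boundary and the result is roughly

       (parallel:DI [(expr_list (nil) (const_int 0))
                     (expr_list (reg:SI 10) (const_int 0))])

   where the NULL_RTX element says the argument also lives in memory and
   r10 carries its first word.  */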
6016
4697a36c
MM
6017/* Determine where to put an argument to a function.
6018 Value is zero to push the argument on the stack,
6019 or a hard register in which to store the argument.
6020
6021 MODE is the argument's machine mode.
6022 TYPE is the data type of the argument (as a tree).
6023 This is null for libcalls where that information may
6024 not be available.
6025 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
6026 the preceding args and about the function being called. It is
6027 not modified in this routine.
4697a36c
MM
6028 NAMED is nonzero if this argument is a named parameter
6029 (otherwise it is an extra parameter matching an ellipsis).
6030
6031 On RS/6000 the first eight words of non-FP are normally in registers
6032 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
6033 Under V.4, the first 8 FP args are in registers.
6034
6035 If this is floating-point and no prototype is specified, we use
6036 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 6037 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 6038 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
6039 doesn't support PARALLEL anyway.
6040
6041 Note that for args passed by reference, function_arg will be called
6042 with MODE and TYPE set to that of the pointer to the arg, not the arg
6043 itself. */
4697a36c 6044
9390387d 6045rtx
f676971a 6046function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 6047 tree type, int named)
4697a36c 6048{
4cc833b7 6049 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 6050
a4f6c312
SS
6051 /* Return a marker to indicate whether CR1 needs to set or clear the
6052 bit that V.4 uses to say fp args were passed in registers.
6053 Assume that we don't need the marker for software floating point,
6054 or compiler generated library calls. */
4697a36c
MM
6055 if (mode == VOIDmode)
6056 {
f607bc57 6057 if (abi == ABI_V4
b9599e46 6058 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
6059 && (cum->stdarg
6060 || (cum->nargs_prototype < 0
6061 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 6062 {
a3170dc6
AH
6063 /* For the SPE, we need to crxor CR6 always. */
6064 if (TARGET_SPE_ABI)
6065 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
6066 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
6067 return GEN_INT (cum->call_cookie
6068 | ((cum->fregno == FP_ARG_MIN_REG)
6069 ? CALL_V4_SET_FP_ARGS
6070 : CALL_V4_CLEAR_FP_ARGS));
7509c759 6071 }
4697a36c 6072
7509c759 6073 return GEN_INT (cum->call_cookie);
4697a36c
MM
6074 }
6075
0b5383eb
DJ
6076 if (rs6000_darwin64_abi && mode == BLKmode
6077 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 6078 {
0b5383eb 6079 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
6080 if (rslt != NULL_RTX)
6081 return rslt;
6082 /* Else fall through to usual handling. */
6083 }
6084
2858f73a 6085 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
6086 if (TARGET_64BIT && ! cum->prototype)
6087 {
c4ad648e
AM
6088 /* Vector parameters get passed in vector register
6089 and also in GPRs or memory, in absence of prototype. */
6090 int align_words;
6091 rtx slot;
6092 align_words = (cum->words + 1) & ~1;
6093
6094 if (align_words >= GP_ARG_NUM_REG)
6095 {
6096 slot = NULL_RTX;
6097 }
6098 else
6099 {
6100 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6101 }
6102 return gen_rtx_PARALLEL (mode,
6103 gen_rtvec (2,
6104 gen_rtx_EXPR_LIST (VOIDmode,
6105 slot, const0_rtx),
6106 gen_rtx_EXPR_LIST (VOIDmode,
6107 gen_rtx_REG (mode, cum->vregno),
6108 const0_rtx)));
c72d6c26
HP
6109 }
6110 else
6111 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
6112 else if (TARGET_ALTIVEC_ABI
6113 && (ALTIVEC_VECTOR_MODE (mode)
6114 || (type && TREE_CODE (type) == VECTOR_TYPE
6115 && int_size_in_bytes (type) == 16)))
0ac081f6 6116 {
2858f73a 6117 if (named || abi == ABI_V4)
a594a19c 6118 return NULL_RTX;
0ac081f6 6119 else
a594a19c
GK
6120 {
6121 /* Vector parameters to varargs functions under AIX or Darwin
6122 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
6123 int align, align_words, n_words;
6124 enum machine_mode part_mode;
a594a19c
GK
6125
6126 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
6127 2 mod 4 in terms of words in 32-bit mode, since the parameter
6128 save area starts at offset 24 from the stack. In 64-bit mode,
6129 they just have to start on an even word, since the parameter
6130 save area is 16-byte aligned. */
6131 if (TARGET_32BIT)
4ed78545 6132 align = (2 - cum->words) & 3;
2858f73a
GK
6133 else
6134 align = cum->words & 1;
a594a19c
GK
6135 align_words = cum->words + align;
6136
6137 /* Out of registers? Memory, then. */
6138 if (align_words >= GP_ARG_NUM_REG)
6139 return NULL_RTX;
ec6376ab
AM
6140
6141 if (TARGET_32BIT && TARGET_POWERPC64)
6142 return rs6000_mixed_function_arg (mode, type, align_words);
6143
2858f73a
GK
6144 /* The vector value goes in GPRs. Only the part of the
6145 value in GPRs is reported here. */
ec6376ab
AM
6146 part_mode = mode;
6147 n_words = rs6000_arg_size (mode, type);
6148 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 6149 /* Fortunately, there are only two possibilities, the value
2858f73a
GK
6150 is either wholly in GPRs or half in GPRs and half not. */
6151 part_mode = DImode;
ec6376ab
AM
6152
6153 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 6154 }
0ac081f6 6155 }
f82f556d
AH
6156 else if (TARGET_SPE_ABI && TARGET_SPE
6157 && (SPE_VECTOR_MODE (mode)
18f63bfa 6158 || (TARGET_E500_DOUBLE && (mode == DFmode
17caeff2
JM
6159 || mode == DCmode
6160 || mode == TFmode
6161 || mode == TCmode))))
a6c9bed4 6162 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 6163
f607bc57 6164 else if (abi == ABI_V4)
4697a36c 6165 {
a3170dc6 6166 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 6167 && (mode == SFmode || mode == DFmode
7393f7f8 6168 || (mode == TFmode && !TARGET_IEEEQUAD)
e41b2a33 6169 || mode == SDmode || mode == DDmode || mode == TDmode))
4cc833b7 6170 {
2d83f070
JJ
6171 /* _Decimal128 must use an even/odd register pair. This assumes
6172 that the register number is odd when fregno is odd. */
6173 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
6174 cum->fregno++;
6175
6176 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
6177 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
6178 return gen_rtx_REG (mode, cum->fregno);
6179 else
b78d48dd 6180 return NULL_RTX;
4cc833b7
RH
6181 }
6182 else
6183 {
b2d04ecf 6184 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
6185 int gregno = cum->sysv_gregno;
6186
4ed78545
AM
6187 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
6188 (r7,r8) or (r9,r10). As does any other 2 word item such
6189 as complex int due to a historical mistake. */
6190 if (n_words == 2)
6191 gregno += (1 - gregno) & 1;
4cc833b7 6192
4ed78545 6193 /* Multi-reg args are not split between registers and stack. */
ec6376ab 6194 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 6195 return NULL_RTX;
ec6376ab
AM
6196
6197 if (TARGET_32BIT && TARGET_POWERPC64)
6198 return rs6000_mixed_function_arg (mode, type,
6199 gregno - GP_ARG_MIN_REG);
6200 return gen_rtx_REG (mode, gregno);
4cc833b7 6201 }
4697a36c 6202 }
4cc833b7
RH
6203 else
6204 {
294bd182 6205 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 6206
2d83f070
JJ
6207 /* _Decimal128 must be passed in an even/odd float register pair.
6208 This assumes that the register number is odd when fregno is odd. */
6209 if (mode == TDmode && (cum->fregno % 2) == 1)
6210 cum->fregno++;
6211
2858f73a 6212 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 6213 {
ec6376ab
AM
6214 rtx rvec[GP_ARG_NUM_REG + 1];
6215 rtx r;
6216 int k;
c53bdcf5
AM
6217 bool needs_psave;
6218 enum machine_mode fmode = mode;
c53bdcf5
AM
6219 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6220
6221 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6222 {
c53bdcf5
AM
6223 /* Currently, we only ever need one reg here because complex
6224 doubles are split. */
7393f7f8
BE
6225 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6226 && (fmode == TFmode || fmode == TDmode));
ec6376ab 6227
7393f7f8
BE
6228 /* Long double or _Decimal128 split over regs and memory. */
6229 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 6230 }
c53bdcf5
AM
6231
6232 /* Do we also need to pass this arg in the parameter save
6233 area? */
6234 needs_psave = (type
6235 && (cum->nargs_prototype <= 0
6236 || (DEFAULT_ABI == ABI_AIX
de17c25f 6237 && TARGET_XL_COMPAT
c53bdcf5
AM
6238 && align_words >= GP_ARG_NUM_REG)));
6239
6240 if (!needs_psave && mode == fmode)
ec6376ab 6241 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 6242
ec6376ab 6243 k = 0;
c53bdcf5
AM
6244 if (needs_psave)
6245 {
ec6376ab 6246 /* Describe the part that goes in gprs or the stack.
c53bdcf5 6247 This piece must come first, before the fprs. */
c53bdcf5
AM
6248 if (align_words < GP_ARG_NUM_REG)
6249 {
6250 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
6251
6252 if (align_words + n_words > GP_ARG_NUM_REG
6253 || (TARGET_32BIT && TARGET_POWERPC64))
6254 {
6255 /* If this is partially on the stack, then we only
6256 include the portion actually in registers here. */
6257 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6258 rtx off;
79773478
AM
6259 int i = 0;
6260 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
6261 /* Not all of the arg fits in gprs. Say that it
6262 goes in memory too, using a magic NULL_RTX
6263 component. Also see comment in
6264 rs6000_mixed_function_arg for why the normal
6265 function_arg_partial_nregs scheme doesn't work
6266 in this case. */
6267 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6268 const0_rtx);
ec6376ab
AM
6269 do
6270 {
6271 r = gen_rtx_REG (rmode,
6272 GP_ARG_MIN_REG + align_words);
2e6c9641 6273 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
6274 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6275 }
6276 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6277 }
6278 else
6279 {
6280 /* The whole arg fits in gprs. */
6281 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6282 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6283 }
c53bdcf5 6284 }
ec6376ab
AM
6285 else
6286 /* It's entirely in memory. */
6287 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
6288 }
6289
ec6376ab
AM
6290 /* Describe where this piece goes in the fprs. */
6291 r = gen_rtx_REG (fmode, cum->fregno);
6292 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6293
6294 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
6295 }
6296 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 6297 {
ec6376ab
AM
6298 if (TARGET_32BIT && TARGET_POWERPC64)
6299 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 6300
4eeca74f
AM
6301 if (mode == BLKmode)
6302 mode = Pmode;
6303
b2d04ecf
AM
6304 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6305 }
4cc833b7
RH
6306 else
6307 return NULL_RTX;
4697a36c 6308 }
4697a36c
MM
6309}
6310\f
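/* Worked illustration (editorial addition, not part of rs6000.c) of the
   even/odd GPR pairing used above for 2-word arguments under the V.4 ABI:
   "gregno += (1 - gregno) & 1" bumps an even register number up to the
   next odd one, so a pair always starts in r3, r5, r7 or r9:

       gregno == 3:  (1 - 3) & 1 == 0   -> pair stays in (r3,r4)
       gregno == 4:  (1 - 4) & 1 == 1   -> pair moves to (r5,r6)

   A pair that would begin past r9 fails the GP_ARG_MAX_REG check above
   and the argument is passed entirely in memory instead.  */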
ec6376ab 6311/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
6312 the number of bytes passed in registers. For args passed entirely in
6313 registers or entirely in memory, zero. When an arg is described by a
6314 PARALLEL, perhaps using more than one register type, this function
6315 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 6316
78a52f11
RH
6317static int
6318rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6319 tree type, bool named)
4697a36c 6320{
c53bdcf5 6321 int ret = 0;
ec6376ab 6322 int align_words;
c53bdcf5 6323
f607bc57 6324 if (DEFAULT_ABI == ABI_V4)
4697a36c 6325 return 0;
4697a36c 6326
c53bdcf5
AM
6327 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6328 && cum->nargs_prototype >= 0)
6329 return 0;
6330
0b5383eb
DJ
6331 /* In this complicated case we just disable the partial_nregs code. */
6332 if (rs6000_darwin64_abi && mode == BLKmode
6333 && TREE_CODE (type) == RECORD_TYPE
6334 && int_size_in_bytes (type) > 0)
6335 return 0;
6336
294bd182 6337 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6338
79773478
AM
6339 if (USE_FP_FOR_ARG_P (cum, mode, type))
6340 {
fb63c729
AM
6341 /* If we are passing this arg in the fixed parameter save area
6342 (gprs or memory) as well as fprs, then this function should
79773478
AM
6343 return the number of partial bytes passed in the parameter
6344 save area rather than partial bytes passed in fprs. */
6345 if (type
6346 && (cum->nargs_prototype <= 0
6347 || (DEFAULT_ABI == ABI_AIX
6348 && TARGET_XL_COMPAT
6349 && align_words >= GP_ARG_NUM_REG)))
6350 return 0;
6351 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6352 > FP_ARG_MAX_REG + 1)
ac7e839c 6353 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6354 else if (cum->nargs_prototype >= 0)
4697a36c
MM
6355 return 0;
6356 }
6357
ec6376ab
AM
6358 if (align_words < GP_ARG_NUM_REG
6359 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6360 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6361
c53bdcf5 6362 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6363 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6364
c53bdcf5 6365 return ret;
4697a36c
MM
6366}
6367\f
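/* Worked example (editorial sketch, not part of rs6000.c; it assumes the
   eight parameter registers r3..r10, i.e. GP_ARG_NUM_REG == 8) of the
   partial-byte computation above for a 32-bit target:

     A 16-byte BLKmode argument whose first word falls in parameter word 6
     (align_words == 6) occupies words 6..9.  Since 6 < 8 and 8 < 6 + 4,
     the argument is split: (8 - 6) * 4 == 8 bytes go in r9/r10 and the
     remaining 8 bytes go in the parameter save area, so this function
     returns 8.  */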
6368/* A C expression that indicates when an argument must be passed by
6369 reference. If nonzero for an argument, a copy of that argument is
6370 made in memory and a pointer to the argument is passed instead of
6371 the argument itself. The pointer is passed in whatever way is
6372 appropriate for passing a pointer to that type.
6373
b2d04ecf
AM
6374 Under V.4, aggregates and long double are passed by reference.
6375
6376 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6377 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
6378
6379 As an extension to all ABIs, variable sized types are passed by
6380 reference. */
4697a36c 6381
8cd5a4e0 6382static bool
f676971a 6383rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6384 enum machine_mode mode, const_tree type,
bada2eb8 6385 bool named ATTRIBUTE_UNUSED)
4697a36c 6386{
602ea4d3 6387 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
6388 {
6389 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6390 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6391 return 1;
6392 }
6393
6394 if (!type)
6395 return 0;
4697a36c 6396
bada2eb8
DE
6397 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6398 {
6399 if (TARGET_DEBUG_ARG)
6400 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6401 return 1;
6402 }
6403
6404 if (int_size_in_bytes (type) < 0)
6405 {
6406 if (TARGET_DEBUG_ARG)
6407 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6408 return 1;
6409 }
6410
6411 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6412 modes only exist for GCC vector types if -maltivec. */
6413 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6414 {
6415 if (TARGET_DEBUG_ARG)
6416 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6417 return 1;
6418 }
b693336b
PB
6419
6420 /* Pass synthetic vectors in memory. */
bada2eb8 6421 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6422 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6423 {
6424 static bool warned_for_pass_big_vectors = false;
6425 if (TARGET_DEBUG_ARG)
6426 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6427 if (!warned_for_pass_big_vectors)
6428 {
d4ee4d25 6429 warning (0, "GCC vector passed by reference: "
b693336b
PB
6430 "non-standard ABI extension with no compatibility guarantee");
6431 warned_for_pass_big_vectors = true;
6432 }
6433 return 1;
6434 }
6435
b2d04ecf 6436 return 0;
4697a36c 6437}
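/* Illustrative sketch (editorial addition, not part of rs6000.c): on a
   target using the SVR4 ABI implemented above (DEFAULT_ABI == ABI_V4),
   an aggregate argument is not copied into registers; the caller makes a
   copy and passes its address, and the callee reads through that pointer.  */

struct big_arg { int a[8]; };

extern void callee (struct big_arg);

void
caller (struct big_arg *p)
{
  /* rs6000_pass_by_reference returns 1 for the aggregate, so only a
     pointer to a caller-made copy travels in a GPR.  */
  callee (*p);
}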
5985c7a6
FJ
6438
6439static void
2d9db8eb 6440rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6441{
6442 int i;
6443 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6444
6445 if (nregs == 0)
6446 return;
6447
c4ad648e 6448 for (i = 0; i < nregs; i++)
5985c7a6 6449 {
9390387d 6450 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6451 if (reload_completed)
c4ad648e
AM
6452 {
6453 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6454 tem = NULL_RTX;
6455 else
6456 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6457 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6458 }
5985c7a6
FJ
6459 else
6460 tem = replace_equiv_address (tem, XEXP (tem, 0));
6461
37409796 6462 gcc_assert (tem);
5985c7a6
FJ
6463
6464 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6465 }
6466}
4697a36c
MM
6467\f
6468/* Perform any actions needed for a function that is receiving a
f676971a 6469 variable number of arguments.
4697a36c
MM
6470
6471 CUM is as above.
6472
6473 MODE and TYPE are the mode and type of the current parameter.
6474
6475 PRETEND_SIZE is a variable that should be set to the amount of stack
6476 that must be pushed by the prolog to pretend that our caller pushed
6477 it.
6478
6479 Normally, this macro will push all remaining incoming registers on the
6480 stack and set PRETEND_SIZE to the length of the registers pushed. */
6481
c6e8c921 6482static void
f676971a 6483setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
6484 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6485 int no_rtl)
4697a36c 6486{
4cc833b7
RH
6487 CUMULATIVE_ARGS next_cum;
6488 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6489 rtx save_area = NULL_RTX, mem;
4862826d
ILT
6490 int first_reg_offset;
6491 alias_set_type set;
4697a36c 6492
f31bf321 6493 /* Skip the last named argument. */
d34c5b80 6494 next_cum = *cum;
594a51fe 6495 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6496
f607bc57 6497 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6498 {
5b667039
JJ
6499 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6500
60e2d0ca 6501 if (! no_rtl)
5b667039
JJ
6502 {
6503 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6504 HOST_WIDE_INT offset = 0;
6505
6506 /* Try to optimize the size of the varargs save area.
6507 The ABI requires that ap.reg_save_area is doubleword
6508 aligned, but we don't need to allocate space for all
6509		     the bytes, only those to which we will actually save
6510 anything. */
6511 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6512 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6513 if (TARGET_HARD_FLOAT && TARGET_FPRS
6514 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6515 && cfun->va_list_fpr_size)
6516 {
6517 if (gpr_reg_num)
6518 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6519 * UNITS_PER_FP_WORD;
6520 if (cfun->va_list_fpr_size
6521 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6522 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6523 else
6524 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6525 * UNITS_PER_FP_WORD;
6526 }
6527 if (gpr_reg_num)
6528 {
6529 offset = -((first_reg_offset * reg_size) & ~7);
6530 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6531 {
6532 gpr_reg_num = cfun->va_list_gpr_size;
6533 if (reg_size == 4 && (first_reg_offset & 1))
6534 gpr_reg_num++;
6535 }
6536 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6537 }
6538 else if (fpr_size)
6539 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6540 * UNITS_PER_FP_WORD
6541 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6542
5b667039
JJ
6543 if (gpr_size + fpr_size)
6544 {
6545 rtx reg_save_area
6546 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6547 gcc_assert (GET_CODE (reg_save_area) == MEM);
6548 reg_save_area = XEXP (reg_save_area, 0);
6549 if (GET_CODE (reg_save_area) == PLUS)
6550 {
6551 gcc_assert (XEXP (reg_save_area, 0)
6552 == virtual_stack_vars_rtx);
6553 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6554 offset += INTVAL (XEXP (reg_save_area, 1));
6555 }
6556 else
6557 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6558 }
6559
6560 cfun->machine->varargs_save_offset = offset;
6561 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6562 }
4697a36c 6563 }
60e2d0ca 6564 else
4697a36c 6565 {
d34c5b80 6566 first_reg_offset = next_cum.words;
4cc833b7 6567 save_area = virtual_incoming_args_rtx;
4697a36c 6568
fe984136 6569 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6570 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6571 }
4697a36c 6572
dfafc897 6573 set = get_varargs_alias_set ();
9d30f3c1
JJ
6574 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6575 && cfun->va_list_gpr_size)
4cc833b7 6576 {
9d30f3c1
JJ
6577 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6578
6579 if (va_list_gpr_counter_field)
6580 {
6581 /* V4 va_list_gpr_size counts number of registers needed. */
6582 if (nregs > cfun->va_list_gpr_size)
6583 nregs = cfun->va_list_gpr_size;
6584 }
6585 else
6586 {
6587 /* char * va_list instead counts number of bytes needed. */
6588 if (nregs > cfun->va_list_gpr_size / reg_size)
6589 nregs = cfun->va_list_gpr_size / reg_size;
6590 }
6591
dfafc897 6592 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6593 plus_constant (save_area,
13e2e16e
DE
6594 first_reg_offset * reg_size));
6595 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6596 set_mem_alias_set (mem, set);
8ac61af7 6597 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6598
f676971a 6599 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6600 nregs);
4697a36c
MM
6601 }
6602
4697a36c 6603 /* Save FP registers if needed. */
f607bc57 6604 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6605 && TARGET_HARD_FLOAT && TARGET_FPRS
6606 && ! no_rtl
9d30f3c1
JJ
6607 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6608 && cfun->va_list_fpr_size)
4697a36c 6609 {
9d30f3c1 6610 int fregno = next_cum.fregno, nregs;
9ebbca7d 6611 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6612 rtx lab = gen_label_rtx ();
5b667039
JJ
6613 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6614 * UNITS_PER_FP_WORD);
4697a36c 6615
c4ad648e
AM
6616 emit_jump_insn
6617 (gen_rtx_SET (VOIDmode,
6618 pc_rtx,
6619 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6620 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6621 const0_rtx),
39403d82 6622 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6623 pc_rtx)));
6624
9d30f3c1
JJ
6625 for (nregs = 0;
6626 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6627 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6628 {
5496b36f 6629 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 6630 MEM_NOTRAP_P (mem) = 1;
c4ad648e 6631 set_mem_alias_set (mem, set);
94ff898d 6632 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 6633 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 6634 }
4cc833b7
RH
6635
6636 emit_label (lab);
4697a36c 6637 }
4697a36c 6638}
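/* Worked example (editorial sketch, not part of rs6000.c; it assumes the
   eight parameter registers r3..r10, i.e. GP_ARG_NUM_REG == 8) of the
   V.4 save-area sizing above, for a 32-bit target whose named arguments
   consumed three GPRs (first_reg_offset == 3, reg_size == 4) and whose
   va_arg uses may need all of the remaining ones:

     gpr_reg_num = 8 - 3            = 5
     offset      = -((3 * 4) & ~7)  = -8
     gpr_size    = (5 * 4 + 7) & ~7 = 24

   Only a 24-byte block is allocated instead of the full 32-byte register
   image; r6..r10 are stored starting 4 bytes into it, and
   ap.reg_save_area, which notionally points at r3's slot 8 bytes before
   the block, keeps the doubleword alignment the ABI requires.  */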
4697a36c 6639
dfafc897 6640/* Create the va_list data type. */
2c4974b7 6641
c35d187f
RH
6642static tree
6643rs6000_build_builtin_va_list (void)
dfafc897 6644{
64c2816f 6645 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6646
9ebbca7d
GK
6647 /* For AIX, prefer 'char *' because that's what the system
6648 header files like. */
f607bc57 6649 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6650 return build_pointer_type (char_type_node);
dfafc897 6651
f1e639b1 6652 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6653 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6654
f676971a 6655 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6656 unsigned_char_type_node);
f676971a 6657 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6658 unsigned_char_type_node);
64c2816f
DT
6659 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6660 every user file. */
6661 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6662 short_unsigned_type_node);
dfafc897
FS
6663 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6664 ptr_type_node);
6665 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6666 ptr_type_node);
6667
9d30f3c1
JJ
6668 va_list_gpr_counter_field = f_gpr;
6669 va_list_fpr_counter_field = f_fpr;
6670
dfafc897
FS
6671 DECL_FIELD_CONTEXT (f_gpr) = record;
6672 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6673 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6674 DECL_FIELD_CONTEXT (f_ovf) = record;
6675 DECL_FIELD_CONTEXT (f_sav) = record;
6676
bab45a51
FS
6677 TREE_CHAIN (record) = type_decl;
6678 TYPE_NAME (record) = type_decl;
dfafc897
FS
6679 TYPE_FIELDS (record) = f_gpr;
6680 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6681 TREE_CHAIN (f_fpr) = f_res;
6682 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6683 TREE_CHAIN (f_ovf) = f_sav;
6684
6685 layout_type (record);
6686
6687 /* The correct type is an array type of one element. */
6688 return build_array_type (record, build_index_type (size_zero_node));
6689}
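/* Editorial sketch (not part of rs6000.c): for the V.4 ABI the record
   built above corresponds roughly to the following C declaration; the
   AIX ABI simply uses a plain char * instead.  The typedef name here is
   an invented placeholder, not the real va_list name.  */

typedef struct __va_list_tag
{
  unsigned char gpr;             /* count of GP argument registers used */
  unsigned char fpr;             /* count of FP argument registers used */
  unsigned short reserved;       /* the named padding field */
  void *overflow_arg_area;       /* arguments passed on the stack */
  void *reg_save_area;           /* GPR/FPR save block set up by va_start */
} rs6000_va_list_sketch[1];      /* the real type is an array of one element */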
6690
6691/* Implement va_start. */
6692
d7bd8aeb 6693static void
a2369ed3 6694rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6695{
dfafc897 6696 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6697 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6698 tree gpr, fpr, ovf, sav, t;
2c4974b7 6699
dfafc897 6700 /* Only SVR4 needs something special. */
f607bc57 6701 if (DEFAULT_ABI != ABI_V4)
dfafc897 6702 {
e5faf155 6703 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6704 return;
6705 }
6706
973a648b 6707 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6708 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6709 f_res = TREE_CHAIN (f_fpr);
6710 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6711 f_sav = TREE_CHAIN (f_ovf);
6712
872a65b5 6713 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6714 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6715 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6716 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6717 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
dfafc897
FS
6718
6719 /* Count number of gp and fp argument registers used. */
38173d38
JH
6720 words = crtl->args.info.words;
6721 n_gpr = MIN (crtl->args.info.sysv_gregno - GP_ARG_MIN_REG,
987732e0 6722 GP_ARG_NUM_REG);
38173d38 6723 n_fpr = MIN (crtl->args.info.fregno - FP_ARG_MIN_REG,
987732e0 6724 FP_ARG_NUM_REG);
dfafc897
FS
6725
6726 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6727 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6728 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6729 words, n_gpr, n_fpr);
dfafc897 6730
9d30f3c1
JJ
6731 if (cfun->va_list_gpr_size)
6732 {
07beea0d 6733 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
47a25a46 6734 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6735 TREE_SIDE_EFFECTS (t) = 1;
6736 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6737 }
58c8adc1 6738
9d30f3c1
JJ
6739 if (cfun->va_list_fpr_size)
6740 {
07beea0d 6741 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
47a25a46 6742 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6743 TREE_SIDE_EFFECTS (t) = 1;
6744 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6745 }
dfafc897
FS
6746
6747 /* Find the overflow area. */
6748 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6749 if (words != 0)
5be014d5
AP
6750 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6751 size_int (words * UNITS_PER_WORD));
07beea0d 6752 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6753 TREE_SIDE_EFFECTS (t) = 1;
6754 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6755
9d30f3c1
JJ
6756 /* If there were no va_arg invocations, don't set up the register
6757 save area. */
6758 if (!cfun->va_list_gpr_size
6759 && !cfun->va_list_fpr_size
6760 && n_gpr < GP_ARG_NUM_REG
6761 && n_fpr < FP_ARG_V4_MAX_REG)
6762 return;
6763
dfafc897
FS
6764 /* Find the register save area. */
6765 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6766 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6767 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6768 size_int (cfun->machine->varargs_save_offset));
07beea0d 6769 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
dfafc897
FS
6770 TREE_SIDE_EFFECTS (t) = 1;
6771 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6772}
6773
6774/* Implement va_arg. */
6775
23a60a04
JM
6776tree
6777rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 6778{
cd3ce9b4
JM
6779 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6780 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6781 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6782 tree lab_false, lab_over, addr;
6783 int align;
6784 tree ptrtype = build_pointer_type (type);
7393f7f8 6785 int regalign = 0;
cd3ce9b4 6786
08b0dc1b
RH
6787 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6788 {
6789 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6790 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6791 }
6792
cd3ce9b4
JM
6793 if (DEFAULT_ABI != ABI_V4)
6794 {
08b0dc1b 6795 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6796 {
6797 tree elem_type = TREE_TYPE (type);
6798 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6799 int elem_size = GET_MODE_SIZE (elem_mode);
6800
6801 if (elem_size < UNITS_PER_WORD)
6802 {
23a60a04 6803 tree real_part, imag_part;
cd3ce9b4
JM
6804 tree post = NULL_TREE;
6805
23a60a04
JM
6806 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6807 &post);
6808 /* Copy the value into a temporary, lest the formal temporary
6809 be reused out from under us. */
6810 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
cd3ce9b4
JM
6811 append_to_statement_list (post, pre_p);
6812
23a60a04
JM
6813 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6814 post_p);
cd3ce9b4 6815
47a25a46 6816 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
6817 }
6818 }
6819
23a60a04 6820 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
6821 }
6822
6823 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6824 f_fpr = TREE_CHAIN (f_gpr);
6825 f_res = TREE_CHAIN (f_fpr);
6826 f_ovf = TREE_CHAIN (f_res);
6827 f_sav = TREE_CHAIN (f_ovf);
6828
872a65b5 6829 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6830 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6831 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6832 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6833 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
cd3ce9b4
JM
6834
6835 size = int_size_in_bytes (type);
6836 rsize = (size + 3) / 4;
6837 align = 1;
6838
08b0dc1b 6839 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3
JJ
6840 && (TYPE_MODE (type) == SFmode
6841 || TYPE_MODE (type) == DFmode
7393f7f8 6842 || TYPE_MODE (type) == TFmode
e41b2a33 6843 || TYPE_MODE (type) == SDmode
7393f7f8
BE
6844 || TYPE_MODE (type) == DDmode
6845 || TYPE_MODE (type) == TDmode))
cd3ce9b4
JM
6846 {
6847 /* FP args go in FP registers, if present. */
cd3ce9b4 6848 reg = fpr;
602ea4d3 6849 n_reg = (size + 7) / 8;
cd3ce9b4
JM
6850 sav_ofs = 8*4;
6851 sav_scale = 8;
e41b2a33 6852 if (TYPE_MODE (type) != SFmode && TYPE_MODE (type) != SDmode)
cd3ce9b4
JM
6853 align = 8;
6854 }
6855 else
6856 {
6857 /* Otherwise into GP registers. */
cd3ce9b4
JM
6858 reg = gpr;
6859 n_reg = rsize;
6860 sav_ofs = 0;
6861 sav_scale = 4;
6862 if (n_reg == 2)
6863 align = 8;
6864 }
6865
6866 /* Pull the value out of the saved registers.... */
6867
6868 lab_over = NULL;
6869 addr = create_tmp_var (ptr_type_node, "addr");
6870 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
6871
6872 /* AltiVec vectors never go in registers when -mabi=altivec. */
6873 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
6874 align = 16;
6875 else
6876 {
6877 lab_false = create_artificial_label ();
6878 lab_over = create_artificial_label ();
6879
6880 /* Long long and SPE vectors are aligned in the registers.
6881	 As is any other 2-gpr item such as complex int, due to a
6882	 historical mistake. */
6883 u = reg;
602ea4d3 6884 if (n_reg == 2 && reg == gpr)
cd3ce9b4 6885 {
7393f7f8 6886 regalign = 1;
cd3ce9b4 6887 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
8fb632eb 6888 build_int_cst (TREE_TYPE (reg), n_reg - 1));
cd3ce9b4
JM
6889 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
6890 }
7393f7f8
BE
6891 /* _Decimal128 is passed in even/odd fpr pairs; the stored
6892 reg number is 0 for f1, so we want to make it odd. */
6893 else if (reg == fpr && TYPE_MODE (type) == TDmode)
6894 {
6895 regalign = 1;
383e91e4
JJ
6896 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), reg,
6897 build_int_cst (TREE_TYPE (reg), 1));
7393f7f8
BE
6898 u = build2 (MODIFY_EXPR, void_type_node, reg, t);
6899 }
cd3ce9b4 6900
95674810 6901 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
6902 t = build2 (GE_EXPR, boolean_type_node, u, t);
6903 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6904 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6905 gimplify_and_add (t, pre_p);
6906
6907 t = sav;
6908 if (sav_ofs)
5be014d5 6909 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 6910
8fb632eb
ZD
6911 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
6912 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
6913 u = fold_convert (sizetype, u);
6914 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
6915 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 6916
e41b2a33
PB
6917 /* _Decimal32 varargs are located in the second word of the 64-bit
6918 FP register for 32-bit binaries. */
4f011e1e
JM
6919 if (!TARGET_POWERPC64
6920 && TARGET_HARD_FLOAT && TARGET_FPRS
6921 && TYPE_MODE (type) == SDmode)
e41b2a33
PB
6922 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
6923
07beea0d 6924 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6925 gimplify_and_add (t, pre_p);
6926
6927 t = build1 (GOTO_EXPR, void_type_node, lab_over);
6928 gimplify_and_add (t, pre_p);
6929
6930 t = build1 (LABEL_EXPR, void_type_node, lab_false);
6931 append_to_statement_list (t, pre_p);
6932
7393f7f8 6933 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
6934 {
6935 /* Ensure that we don't find any more args in regs.
7393f7f8 6936		     Alignment has been taken care of for the special cases. */
383e91e4
JJ
6937 t = build_gimple_modify_stmt (reg,
6938 build_int_cst (TREE_TYPE (reg), 8));
cd3ce9b4
JM
6939 gimplify_and_add (t, pre_p);
6940 }
6941 }
6942
6943 /* ... otherwise out of the overflow area. */
6944
6945 /* Care for on-stack alignment if needed. */
6946 t = ovf;
6947 if (align != 1)
6948 {
5be014d5
AP
6949 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
6950 t = fold_convert (sizetype, t);
4a90aeeb 6951 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
6952 size_int (-align));
6953 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
6954 }
6955 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6956
07beea0d 6957 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6958 gimplify_and_add (u, pre_p);
6959
5be014d5 6960 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
07beea0d 6961 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
cd3ce9b4
JM
6962 gimplify_and_add (t, pre_p);
6963
6964 if (lab_over)
6965 {
6966 t = build1 (LABEL_EXPR, void_type_node, lab_over);
6967 append_to_statement_list (t, pre_p);
6968 }
6969
0cfbc62b
JM
6970 if (STRICT_ALIGNMENT
6971 && (TYPE_ALIGN (type)
6972 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
6973 {
6974 /* The value (of type complex double, for example) may not be
6975 aligned in memory in the saved registers, so copy via a
6976 temporary. (This is the same code as used for SPARC.) */
6977 tree tmp = create_tmp_var (type, "va_arg_tmp");
6978 tree dest_addr = build_fold_addr_expr (tmp);
6979
5039610b
SL
6980 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
6981 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
6982
6983 gimplify_and_add (copy, pre_p);
6984 addr = dest_addr;
6985 }
6986
08b0dc1b 6987 addr = fold_convert (ptrtype, addr);
872a65b5 6988 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
6989}
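/* Worked example (editorial addition, not part of rs6000.c) of the
   on-stack alignment step above: for a doubleword-aligned type
   (align == 8) with the overflow pointer currently at byte offset 20,

     t = (20 + (8 - 1)) & -8 = 27 & ~7 = 24

   so the argument is fetched from offset 24, and the overflow pointer is
   then advanced past the argument's size.  */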
6990
0ac081f6
AH
6991/* Builtins. */
6992
58646b77
PB
6993static void
6994def_builtin (int mask, const char *name, tree type, int code)
6995{
96038623 6996 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
58646b77
PB
6997 {
6998 if (rs6000_builtin_decls[code])
6999 abort ();
7000
7001 rs6000_builtin_decls[code] =
c79efc4d
RÁE
7002 add_builtin_function (name, type, code, BUILT_IN_MD,
7003 NULL, NULL_TREE);
58646b77
PB
7004 }
7005}
0ac081f6 7006
24408032
AH
7007/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
7008
2212663f 7009static const struct builtin_description bdesc_3arg[] =
24408032
AH
7010{
7011 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
7012 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
7013 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
7014 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
7015 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
7016 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
7017 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
7018 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
7019 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
7020 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 7021 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
7022 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
7023 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
7024 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
7025 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
7026 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
7027 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
7028 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
7029 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
7030 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
7031 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
7032 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
7033 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
7034
7035 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
7036 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
7037 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
7038 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
7039 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
7040 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
7041 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
7042 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
7043 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
7044 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
7045 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
7046 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
7047 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
7048 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
7049 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
96038623
DE
7050
7051 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
7052 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
7053 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
7054 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
7055 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
7056 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
7057 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
7058 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 7059 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 7060};
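/* Editorial sketch (not part of rs6000.c) showing how entries like those
   in the table above are registered through def_builtin.  The field names
   mask/name/code are assumed from the initializer order visible above, and
   the single "fntype" argument stands in for the per-entry function type
   that the real initialization code selects; this illustrates the pattern
   and is not a verbatim excerpt.  */

static void
register_3arg_builtins_sketch (tree fntype)
{
  const struct builtin_description *d = bdesc_3arg;
  size_t i;

  for (i = 0; i < sizeof (bdesc_3arg) / sizeof (bdesc_3arg[0]); i++, d++)
    def_builtin (d->mask, d->name, fntype, d->code);
}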
2212663f 7061
95385cbb
AH
7062/* DST operations: void foo (void *, const int, const char). */
7063
7064static const struct builtin_description bdesc_dst[] =
7065{
7066 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
7067 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
7068 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
7069 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
7070
7071 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
7072 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
7073 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
7074 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
7075};
7076
2212663f 7077/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 7078
a3170dc6 7079static struct builtin_description bdesc_2arg[] =
0ac081f6 7080{
f18c054f
DB
7081 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
7082 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
7083 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
7084 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
7085 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
7086 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
7087 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
7088 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
7089 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
7090 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
7091 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 7092 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 7093 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
7094 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
7095 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
7096 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
7097 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
7098 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
7099 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
7100 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
7101 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
7102 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
7103 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
7104 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
7105 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
7106 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
7107 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
7108 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
7109 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
7110 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
7111 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
7112 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
7113 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
7114 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
7115 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
7116 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
7117 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
7118 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
7119 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
7120 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
7121 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
7122 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
7123 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
7124 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
7125 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
7126 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
7127 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
7128 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
7129 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
7130 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
7131 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
7132 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
7133 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
7134 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
7135 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
7136 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
7137 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
7138 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
7139 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
7140 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
7141 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
7142 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
7143 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
7144 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 7145 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 7146 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
7147 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
7148 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
7149 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 7150 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
7151 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
7152 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
7153 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
7154 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
7155 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
7156 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
7157 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
7158 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
71d46ca5
MM
7159 { MASK_ALTIVEC, CODE_FOR_vashlv16qi3, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
7160 { MASK_ALTIVEC, CODE_FOR_vashlv8hi3, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
7161 { MASK_ALTIVEC, CODE_FOR_vashlv4si3, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
0ac081f6
AH
7162 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
7163 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
7164 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
7165 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
7166 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
71d46ca5
MM
7167 { MASK_ALTIVEC, CODE_FOR_vlshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
7168 { MASK_ALTIVEC, CODE_FOR_vlshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
7169 { MASK_ALTIVEC, CODE_FOR_vlshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
7170 { MASK_ALTIVEC, CODE_FOR_vashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
7171 { MASK_ALTIVEC, CODE_FOR_vashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
7172 { MASK_ALTIVEC, CODE_FOR_vashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
7173 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
7174 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
7175 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
7176 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
7177 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
7178 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
7179 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
7180 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
7181 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
7182 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
7183 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
7184 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
7185 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
7186 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
7187 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
7188 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
7189 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
7190 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 7191 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 7192
58646b77
PB
7193 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
7194 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
7195 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
7196 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
7197 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
7198 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
7199 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
7200 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
7201 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
7202 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
7203 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
7204 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
7205 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
7206 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
7207 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
7208 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
7209 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
7210 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
7211 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
7212 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
7213 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
7214 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
7215 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
7216 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
7217 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7218 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7219 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7220 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7221 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7222 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7223 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7224 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7225 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7226 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7227 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7228 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7229 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7230 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7231 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7232 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7233 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7234 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7235 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7236 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7237 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7238 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7239 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7240 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7241 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7242 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7243 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7244 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7245 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7246 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7247 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7248 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7249 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7250 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7251 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7252 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7253 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7254 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7255 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7256 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7257 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7258 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7259 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7260 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7261 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7262 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7263 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7264 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7265 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7266 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7267 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7268 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7269 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7270 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7271 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7272 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7273 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7274 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7275 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7276 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7277 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7278 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7279 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7280 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7281 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7282 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7283 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7284 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7285 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7286 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7287 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7288 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7289 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7290 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7291 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7292 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7293 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7294 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7295 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7296 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7297 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7298 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7299 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7300 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7301 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7302 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7303 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7304 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7305 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7306 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7307 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7308 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7309 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7310 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7311 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7312 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7313 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7314 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7315 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7316 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7317 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7318 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7319 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7320
96038623
DE
7321 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7322 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7323 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7324 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7325 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7326 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7327 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7328 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7329 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7330 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7331
a3170dc6
AH
7332   /* Placeholder, leave as first SPE builtin. */
7333 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7334 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7335 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7336 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7337 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7338 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7339 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7340 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7341 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7342 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7343 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7344 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7345 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7346 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7347 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7348 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7349 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7350 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7351 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7352 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7353 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7354 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7355 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7356 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7357 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7358 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7359 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7360 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7361 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7362 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7363 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7364 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7365 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7366 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7367 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7368 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7369 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7370 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7371 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7372 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7373 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7374 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7375 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7376 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7377 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7378 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7379 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7380 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7381 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7382 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7383 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7384 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7385 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7386 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7387 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7388 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7389 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7390 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7391 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7392 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7393 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7394 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7395 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7396 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7397 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7398 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7399 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7400 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7401 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7402 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7403 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7404 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7405 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7406 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
7407 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7408 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
7409 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7410 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7411 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7412 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7413 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7414 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7415 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7416 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7417 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7418 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7419 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7420 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7421 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7422 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7423 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7424 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7425 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7426 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7427 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7428 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7429 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7430 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7431 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7432 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7433 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7434 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7435 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7436 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7437 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7438 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7439 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7440 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7441 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7442
7443 /* SPE binary operations expecting a 5-bit unsigned literal. */
7444 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7445
7446 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7447 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7448 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7449 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7450 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7451 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7452 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7453 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7454 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7455 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7456 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7457 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7458 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7459 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7460 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7461 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7462 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7463 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7464 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7465 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7466 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7467 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7468 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7469 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7470 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7471 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7472
7473 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7474 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
7475};
7476
7477/* AltiVec predicates. */
7478
7479struct builtin_description_predicates
7480{
7481 const unsigned int mask;
7482 const enum insn_code icode;
7483 const char *opcode;
7484 const char *const name;
7485 const enum rs6000_builtins code;
7486};
7487
7488static const struct builtin_description_predicates bdesc_altivec_preds[] =
7489{
7490 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7491 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7492 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7493 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7494 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7495 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7496 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7497 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7498 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7499 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7500 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7501 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
7502 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7503
7504 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7505 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7506 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7507};
24408032 7508
a3170dc6
AH
7509/* SPE predicates. */
7510static struct builtin_description bdesc_spe_predicates[] =
7511{
7512 /* Place-holder. Leave as first. */
7513 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7514 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7515 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7516 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7517 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7518 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7519 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7520 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7521 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7522 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7523 /* Place-holder. Leave as last. */
7524 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7525};
7526
7527/* SPE evsel predicates. */
7528static struct builtin_description bdesc_spe_evsel[] =
7529{
7530 /* Place-holder. Leave as first. */
7531 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7532 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7533 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7534 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7535 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7536 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7537 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7538 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7539 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7540 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7541 /* Place-holder. Leave as last. */
7542 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7543};
7544
96038623
DE
7545/* PAIRED predicates. */
7546static const struct builtin_description bdesc_paired_preds[] =
7547{
7548 /* Place-holder. Leave as first. */
7549 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7550 /* Place-holder. Leave as last. */
7551 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7552};
7553
b6d08ca1 7554/* ABS* operations. */
100c4561
AH
7555
7556static const struct builtin_description bdesc_abs[] =
7557{
7558 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7559 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7560 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7561 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7562 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7563 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7564 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7565};
7566
617e0e1d
DB
7567/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7568 foo (VECa). */
24408032 7569
a3170dc6 7570static struct builtin_description bdesc_1arg[] =
2212663f 7571{
617e0e1d
DB
7572 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7573 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7574 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7575 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7576 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7577 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7578 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7579 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7580 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7581 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7582 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7583 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7584 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7585 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7586 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7587 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7588 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7589
58646b77
PB
7590 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7591 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7592 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7593 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7594 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7595 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7596 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7597 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7598 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7599 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7600 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7601 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7602 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7603 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7604 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7605 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7606 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7607 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7608 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7609
a3170dc6
AH
7610 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7611 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7612 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7613 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7614 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7615 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7616 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7617 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7618 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7619 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7620 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7621 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7622 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7623 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7624 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7625 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7626 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7627 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7628 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7629 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7630 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7631 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7632 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7633 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7634 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7635 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7636 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7637 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7638 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7639 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7640
7641 /* Place-holder. Leave as last unary SPE builtin. */
96038623
DE
7642 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7643
7644 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7645 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7646 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7647 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7648 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
2212663f
DB
7649};
7650
7651static rtx
5039610b 7652rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7653{
7654 rtx pat;
5039610b 7655 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7656 rtx op0 = expand_normal (arg0);
2212663f
DB
7657 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7658 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7659
0559cc77
DE
7660 if (icode == CODE_FOR_nothing)
7661 /* Builtin not supported on this processor. */
7662 return 0;
7663
20e26713
AH
7664 /* If we got invalid arguments, bail out before generating bad rtl. */
7665 if (arg0 == error_mark_node)
9a171fcd 7666 return const0_rtx;
20e26713 7667
0559cc77
DE
7668 if (icode == CODE_FOR_altivec_vspltisb
7669 || icode == CODE_FOR_altivec_vspltish
7670 || icode == CODE_FOR_altivec_vspltisw
7671 || icode == CODE_FOR_spe_evsplatfi
7672 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7673 {
7674 /* Only allow 5-bit *signed* literals. */
b44140e7 7675 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7676 || INTVAL (op0) > 15
7677 || INTVAL (op0) < -16)
b44140e7
AH
7678 {
7679 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7680 return const0_rtx;
b44140e7 7681 }
b44140e7
AH
7682 }
7683
c62f2db5 7684 if (target == 0
2212663f
DB
7685 || GET_MODE (target) != tmode
7686 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7687 target = gen_reg_rtx (tmode);
7688
7689 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7690 op0 = copy_to_mode_reg (mode0, op0);
7691
7692 pat = GEN_FCN (icode) (target, op0);
7693 if (! pat)
7694 return 0;
7695 emit_insn (pat);
0ac081f6 7696
2212663f
DB
7697 return target;
7698}
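/* [Editorial sketch, not part of rs6000.c]  How the 5-bit signed literal
   check above is reached from user code (assuming -maltivec; the variable
   names are illustrative):

       vector signed char ok  = __builtin_altivec_vspltisb (7);
       vector signed char bad = __builtin_altivec_vspltisb (99);

   The first call satisfies -16 <= INTVAL (op0) <= 15 and expands through
   CODE_FOR_altivec_vspltisb; the second trips the "argument 1 must be a
   5-bit signed literal" error and yields const0_rtx.  */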
ae4b4a02 7699
100c4561 7700static rtx
5039610b 7701altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7702{
7703 rtx pat, scratch1, scratch2;
5039610b 7704 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7705 rtx op0 = expand_normal (arg0);
100c4561
AH
7706 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7707 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7708
7709 /* If we have invalid arguments, bail out before generating bad rtl. */
7710 if (arg0 == error_mark_node)
9a171fcd 7711 return const0_rtx;
100c4561
AH
7712
7713 if (target == 0
7714 || GET_MODE (target) != tmode
7715 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7716 target = gen_reg_rtx (tmode);
7717
7718 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7719 op0 = copy_to_mode_reg (mode0, op0);
7720
7721 scratch1 = gen_reg_rtx (mode0);
7722 scratch2 = gen_reg_rtx (mode0);
7723
7724 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7725 if (! pat)
7726 return 0;
7727 emit_insn (pat);
7728
7729 return target;
7730}
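/* [Editorial note, not part of rs6000.c]  Unlike the generic unop path, the
   abs expander allocates two scratch registers, because the altivec_abs[s]_*
   patterns listed in bdesc_abs take two extra scratch operands (presumably
   the zero vector and an intermediate value).  A hedged user-level sketch,
   assuming -maltivec and illustrative names:

       vector signed int v = { -1, 2, -3, 4 };
       vector signed int a = __builtin_altivec_abs_v4si (v);
       vector signed int s = __builtin_altivec_abss_v4si (v);

   Both calls are routed here from altivec_expand_builtin via bdesc_abs.  */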
7731
0ac081f6 7732static rtx
5039610b 7733rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7734{
7735 rtx pat;
5039610b
SL
7736 tree arg0 = CALL_EXPR_ARG (exp, 0);
7737 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7738 rtx op0 = expand_normal (arg0);
7739 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7740 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7741 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7742 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7743
0559cc77
DE
7744 if (icode == CODE_FOR_nothing)
7745 /* Builtin not supported on this processor. */
7746 return 0;
7747
20e26713
AH
7748 /* If we got invalid arguments, bail out before generating bad rtl. */
7749 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7750 return const0_rtx;
20e26713 7751
0559cc77
DE
7752 if (icode == CODE_FOR_altivec_vcfux
7753 || icode == CODE_FOR_altivec_vcfsx
7754 || icode == CODE_FOR_altivec_vctsxs
7755 || icode == CODE_FOR_altivec_vctuxs
7756 || icode == CODE_FOR_altivec_vspltb
7757 || icode == CODE_FOR_altivec_vsplth
7758 || icode == CODE_FOR_altivec_vspltw
7759 || icode == CODE_FOR_spe_evaddiw
7760 || icode == CODE_FOR_spe_evldd
7761 || icode == CODE_FOR_spe_evldh
7762 || icode == CODE_FOR_spe_evldw
7763 || icode == CODE_FOR_spe_evlhhesplat
7764 || icode == CODE_FOR_spe_evlhhossplat
7765 || icode == CODE_FOR_spe_evlhhousplat
7766 || icode == CODE_FOR_spe_evlwhe
7767 || icode == CODE_FOR_spe_evlwhos
7768 || icode == CODE_FOR_spe_evlwhou
7769 || icode == CODE_FOR_spe_evlwhsplat
7770 || icode == CODE_FOR_spe_evlwwsplat
7771 || icode == CODE_FOR_spe_evrlwi
7772 || icode == CODE_FOR_spe_evslwi
7773 || icode == CODE_FOR_spe_evsrwis
f5119d10 7774 || icode == CODE_FOR_spe_evsubifw
0559cc77 7775 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7776 {
7777 /* Only allow 5-bit unsigned literals. */
8bb418a3 7778 STRIP_NOPS (arg1);
b44140e7
AH
7779 if (TREE_CODE (arg1) != INTEGER_CST
7780 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7781 {
7782 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7783 return const0_rtx;
b44140e7 7784 }
b44140e7
AH
7785 }
7786
c62f2db5 7787 if (target == 0
0ac081f6
AH
7788 || GET_MODE (target) != tmode
7789 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7790 target = gen_reg_rtx (tmode);
7791
7792 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7793 op0 = copy_to_mode_reg (mode0, op0);
7794 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7795 op1 = copy_to_mode_reg (mode1, op1);
7796
7797 pat = GEN_FCN (icode) (target, op0, op1);
7798 if (! pat)
7799 return 0;
7800 emit_insn (pat);
7801
7802 return target;
7803}
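/* [Editorial sketch, not part of rs6000.c]  The icode list above singles out
   the splat, convert and load forms whose second operand must be a 5-bit
   unsigned literal.  Assuming -maltivec, with illustrative names:

       vector signed int v = { 1, 2, 3, 4 };
       vector signed int a = __builtin_altivec_vspltw (v, 2);
       vector signed int b = __builtin_altivec_vspltw (v, 37);

   The first call expands normally; the second fails the
   (TREE_INT_CST_LOW (arg1) & ~0x1f) test and is rejected with
   "argument 2 must be a 5-bit unsigned literal".  */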
6525c0e7 7804
ae4b4a02 7805static rtx
f676971a 7806altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7807 tree exp, rtx target)
ae4b4a02
AH
7808{
7809 rtx pat, scratch;
5039610b
SL
7810 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7811 tree arg0 = CALL_EXPR_ARG (exp, 1);
7812 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7813 rtx op0 = expand_normal (arg0);
7814 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
7815 enum machine_mode tmode = SImode;
7816 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7817 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7818 int cr6_form_int;
7819
7820 if (TREE_CODE (cr6_form) != INTEGER_CST)
7821 {
7822 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 7823 return const0_rtx;
ae4b4a02
AH
7824 }
7825 else
7826 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
7827
37409796 7828 gcc_assert (mode0 == mode1);
ae4b4a02
AH
7829
7830 /* If we have invalid arguments, bail out before generating bad rtl. */
7831 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7832 return const0_rtx;
ae4b4a02
AH
7833
7834 if (target == 0
7835 || GET_MODE (target) != tmode
7836 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7837 target = gen_reg_rtx (tmode);
7838
7839 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7840 op0 = copy_to_mode_reg (mode0, op0);
7841 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7842 op1 = copy_to_mode_reg (mode1, op1);
7843
7844 scratch = gen_reg_rtx (mode0);
7845
7846 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 7847 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
7848 if (! pat)
7849 return 0;
7850 emit_insn (pat);
7851
7852 /* The vec_any* and vec_all* predicates use the same opcodes for two
7853 different operations, but the bits in CR6 will be different
7854 depending on what information we want. So we have to play tricks
7855 with CR6 to get the right bits out.
7856
7857 If you think this is disgusting, look at the specs for the
7858 AltiVec predicates. */
7859
c4ad648e
AM
7860 switch (cr6_form_int)
7861 {
7862 case 0:
7863 emit_insn (gen_cr6_test_for_zero (target));
7864 break;
7865 case 1:
7866 emit_insn (gen_cr6_test_for_zero_reverse (target));
7867 break;
7868 case 2:
7869 emit_insn (gen_cr6_test_for_lt (target));
7870 break;
7871 case 3:
7872 emit_insn (gen_cr6_test_for_lt_reverse (target));
7873 break;
7874 default:
7875 error ("argument 1 of __builtin_altivec_predicate is out of range");
7876 break;
7877 }
ae4b4a02
AH
7878
7879 return target;
7880}
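/* [Editorial note, not part of rs6000.c]  Every *_p predicate builtin
   carries a hidden leading argument (0-3) selecting one of the four CR6
   tests in the switch above; the vec_all_* and vec_any_* macros in
   altivec.h supply it.  A hedged direct use, assuming -maltivec and that
   form 2 (the CR6.LT test) means "relation held for all elements", per the
   usual AltiVec convention:

       vector unsigned int a, b;
       int all_eq = __builtin_altivec_vcmpequw_p (2, a, b);

   A non-constant or out-of-range first argument hits the two error paths
   above instead.  */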
7881
96038623
DE
7882static rtx
7883paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
7884{
7885 rtx pat, addr;
7886 tree arg0 = CALL_EXPR_ARG (exp, 0);
7887 tree arg1 = CALL_EXPR_ARG (exp, 1);
7888 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7889 enum machine_mode mode0 = Pmode;
7890 enum machine_mode mode1 = Pmode;
7891 rtx op0 = expand_normal (arg0);
7892 rtx op1 = expand_normal (arg1);
7893
7894 if (icode == CODE_FOR_nothing)
7895 /* Builtin not supported on this processor. */
7896 return 0;
7897
7899 /* If we got invalid arguments, bail out before generating bad rtl. */
7899 if (arg0 == error_mark_node || arg1 == error_mark_node)
7900 return const0_rtx;
7901
7902 if (target == 0
7903 || GET_MODE (target) != tmode
7904 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7905 target = gen_reg_rtx (tmode);
7906
7907 op1 = copy_to_mode_reg (mode1, op1);
7908
7909 if (op0 == const0_rtx)
7910 {
7911 addr = gen_rtx_MEM (tmode, op1);
7912 }
7913 else
7914 {
7915 op0 = copy_to_mode_reg (mode0, op0);
7916 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7917 }
7918
7919 pat = GEN_FCN (icode) (target, addr);
7920
7921 if (! pat)
7922 return 0;
7923 emit_insn (pat);
7924
7925 return target;
7926}
7927
b4a62fa0 7928static rtx
5039610b 7929altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
b4a62fa0
SB
7930{
7931 rtx pat, addr;
5039610b
SL
7932 tree arg0 = CALL_EXPR_ARG (exp, 0);
7933 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
7934 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7935 enum machine_mode mode0 = Pmode;
7936 enum machine_mode mode1 = Pmode;
84217346
MD
7937 rtx op0 = expand_normal (arg0);
7938 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
7939
7940 if (icode == CODE_FOR_nothing)
7941 /* Builtin not supported on this processor. */
7942 return 0;
7943
7945 /* If we got invalid arguments, bail out before generating bad rtl. */
7945 if (arg0 == error_mark_node || arg1 == error_mark_node)
7946 return const0_rtx;
7947
7948 if (target == 0
7949 || GET_MODE (target) != tmode
7950 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7951 target = gen_reg_rtx (tmode);
7952
f676971a 7953 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
7954
7955 if (op0 == const0_rtx)
7956 {
7957 addr = gen_rtx_MEM (tmode, op1);
7958 }
7959 else
7960 {
7961 op0 = copy_to_mode_reg (mode0, op0);
7962 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7963 }
7964
7965 pat = GEN_FCN (icode) (target, addr);
7966
7967 if (! pat)
7968 return 0;
7969 emit_insn (pat);
7970
7971 return target;
7972}
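/* [Editorial note, not part of rs6000.c]  Both lv expanders above form the
   memory operand the same way: the second call argument always becomes the
   base register, and a literal-zero first argument drops the (plus ...) so
   the address is a plain register MEM.  A hedged sketch, assuming -maltivec
   and illustrative names p (pointer) and off (integer):

       vector signed int x = __builtin_altivec_lvx (0, p);
       vector signed int y = __builtin_altivec_lvx (off, p);
*/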
7973
61bea3b0 7974static rtx
5039610b 7975spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 7976{
5039610b
SL
7977 tree arg0 = CALL_EXPR_ARG (exp, 0);
7978 tree arg1 = CALL_EXPR_ARG (exp, 1);
7979 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7980 rtx op0 = expand_normal (arg0);
7981 rtx op1 = expand_normal (arg1);
7982 rtx op2 = expand_normal (arg2);
61bea3b0
AH
7983 rtx pat;
7984 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
7985 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
7986 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
7987
7988 /* Invalid arguments. Bail before doing anything stoopid! */
7989 if (arg0 == error_mark_node
7990 || arg1 == error_mark_node
7991 || arg2 == error_mark_node)
7992 return const0_rtx;
7993
7994 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
7995 op0 = copy_to_mode_reg (mode2, op0);
7996 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7997 op1 = copy_to_mode_reg (mode0, op1);
7998 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7999 op2 = copy_to_mode_reg (mode1, op2);
8000
8001 pat = GEN_FCN (icode) (op1, op2, op0);
8002 if (pat)
8003 emit_insn (pat);
8004 return NULL_RTX;
8005}
8006
96038623
DE
8007static rtx
8008paired_expand_stv_builtin (enum insn_code icode, tree exp)
8009{
8010 tree arg0 = CALL_EXPR_ARG (exp, 0);
8011 tree arg1 = CALL_EXPR_ARG (exp, 1);
8012 tree arg2 = CALL_EXPR_ARG (exp, 2);
8013 rtx op0 = expand_normal (arg0);
8014 rtx op1 = expand_normal (arg1);
8015 rtx op2 = expand_normal (arg2);
8016 rtx pat, addr;
8017 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8018 enum machine_mode mode1 = Pmode;
8019 enum machine_mode mode2 = Pmode;
8020
8021 /* Invalid arguments. Bail before doing anything stoopid! */
8022 if (arg0 == error_mark_node
8023 || arg1 == error_mark_node
8024 || arg2 == error_mark_node)
8025 return const0_rtx;
8026
8027 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8028 op0 = copy_to_mode_reg (tmode, op0);
8029
8030 op2 = copy_to_mode_reg (mode2, op2);
8031
8032 if (op1 == const0_rtx)
8033 {
8034 addr = gen_rtx_MEM (tmode, op2);
8035 }
8036 else
8037 {
8038 op1 = copy_to_mode_reg (mode1, op1);
8039 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8040 }
8041
8042 pat = GEN_FCN (icode) (addr, op0);
8043 if (pat)
8044 emit_insn (pat);
8045 return NULL_RTX;
8046}
8047
6525c0e7 8048static rtx
5039610b 8049altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 8050{
5039610b
SL
8051 tree arg0 = CALL_EXPR_ARG (exp, 0);
8052 tree arg1 = CALL_EXPR_ARG (exp, 1);
8053 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8054 rtx op0 = expand_normal (arg0);
8055 rtx op1 = expand_normal (arg1);
8056 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
8057 rtx pat, addr;
8058 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8059 enum machine_mode mode1 = Pmode;
8060 enum machine_mode mode2 = Pmode;
6525c0e7
AH
8061
8062 /* Invalid arguments. Bail before doing anything stoopid! */
8063 if (arg0 == error_mark_node
8064 || arg1 == error_mark_node
8065 || arg2 == error_mark_node)
9a171fcd 8066 return const0_rtx;
6525c0e7 8067
b4a62fa0
SB
8068 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8069 op0 = copy_to_mode_reg (tmode, op0);
8070
f676971a 8071 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
8072
8073 if (op1 == const0_rtx)
8074 {
8075 addr = gen_rtx_MEM (tmode, op2);
8076 }
8077 else
8078 {
8079 op1 = copy_to_mode_reg (mode1, op1);
8080 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8081 }
6525c0e7 8082
b4a62fa0 8083 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
8084 if (pat)
8085 emit_insn (pat);
8086 return NULL_RTX;
8087}
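/* [Editorial note, not part of rs6000.c]  The store expanders mirror the
   loads with the operand roles swapped: the value to be stored is call
   argument 0 yet becomes the second operand of the generated insn
   (GEN_FCN (icode) (addr, op0)), while arguments 1 and 2 supply the address,
   again dropping the (plus ...) when the offset is literal zero.  A hedged
   sketch, assuming -maltivec and illustrative names:

       __builtin_altivec_stvx (v, 0, p);
       __builtin_altivec_stvx (v, off, p);
*/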
8088
2212663f 8089static rtx
5039610b 8090rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
8091{
8092 rtx pat;
5039610b
SL
8093 tree arg0 = CALL_EXPR_ARG (exp, 0);
8094 tree arg1 = CALL_EXPR_ARG (exp, 1);
8095 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8096 rtx op0 = expand_normal (arg0);
8097 rtx op1 = expand_normal (arg1);
8098 rtx op2 = expand_normal (arg2);
2212663f
DB
8099 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8100 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8101 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8102 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 8103
774b5662
DE
8104 if (icode == CODE_FOR_nothing)
8105 /* Builtin not supported on this processor. */
8106 return 0;
8107
20e26713
AH
8108 /* If we got invalid arguments, bail out before generating bad rtl. */
8109 if (arg0 == error_mark_node
8110 || arg1 == error_mark_node
8111 || arg2 == error_mark_node)
9a171fcd 8112 return const0_rtx;
20e26713 8113
aba5fb01
NS
8114 if (icode == CODE_FOR_altivec_vsldoi_v4sf
8115 || icode == CODE_FOR_altivec_vsldoi_v4si
8116 || icode == CODE_FOR_altivec_vsldoi_v8hi
8117 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
8118 {
8119 /* Only allow 4-bit unsigned literals. */
8bb418a3 8120 STRIP_NOPS (arg2);
b44140e7
AH
8121 if (TREE_CODE (arg2) != INTEGER_CST
8122 || TREE_INT_CST_LOW (arg2) & ~0xf)
8123 {
8124 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 8125 return const0_rtx;
b44140e7 8126 }
b44140e7
AH
8127 }
8128
c62f2db5 8129 if (target == 0
2212663f
DB
8130 || GET_MODE (target) != tmode
8131 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8132 target = gen_reg_rtx (tmode);
8133
8134 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8135 op0 = copy_to_mode_reg (mode0, op0);
8136 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8137 op1 = copy_to_mode_reg (mode1, op1);
8138 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
8139 op2 = copy_to_mode_reg (mode2, op2);
8140
49e39588
RE
8141 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
8142 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
8143 else
8144 pat = GEN_FCN (icode) (target, op0, op1, op2);
2212663f
DB
8145 if (! pat)
8146 return 0;
8147 emit_insn (pat);
8148
8149 return target;
8150}
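/* [Editorial sketch, not part of rs6000.c]  The only ternary forms with an
   immediate restriction are the vsldoi variants, whose shift count must be
   a 4-bit unsigned literal; the paired-float selv2sf4 case additionally gets
   a CONST0_RTX (SFmode) appended as an extra operand.  Assuming -maltivec
   and <altivec.h>:

       vector signed int a, b;
       vector signed int c = vec_sld (a, b, 3);
       vector signed int d = vec_sld (a, b, 19);

   vec_sld is expected to lower to the vsldoi icodes above; the first call
   passes the (arg2 & ~0xf) check, the second should be rejected with
   "argument 3 must be a 4-bit unsigned literal".  */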
92898235 8151
3a9b8c7e 8152/* Expand the lvx builtins. */
0ac081f6 8153static rtx
a2369ed3 8154altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 8155{
5039610b 8156 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 8157 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
8158 tree arg0;
8159 enum machine_mode tmode, mode0;
7c3abc73 8160 rtx pat, op0;
3a9b8c7e 8161 enum insn_code icode;
92898235 8162
0ac081f6
AH
8163 switch (fcode)
8164 {
f18c054f 8165 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 8166 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 8167 break;
f18c054f 8168 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 8169 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
8170 break;
8171 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 8172 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
8173 break;
8174 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 8175 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
8176 break;
8177 default:
8178 *expandedp = false;
8179 return NULL_RTX;
8180 }
0ac081f6 8181
3a9b8c7e 8182 *expandedp = true;
f18c054f 8183
5039610b 8184 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8185 op0 = expand_normal (arg0);
3a9b8c7e
AH
8186 tmode = insn_data[icode].operand[0].mode;
8187 mode0 = insn_data[icode].operand[1].mode;
f18c054f 8188
3a9b8c7e
AH
8189 if (target == 0
8190 || GET_MODE (target) != tmode
8191 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8192 target = gen_reg_rtx (tmode);
24408032 8193
3a9b8c7e
AH
8194 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8195 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 8196
3a9b8c7e
AH
8197 pat = GEN_FCN (icode) (target, op0);
8198 if (! pat)
8199 return 0;
8200 emit_insn (pat);
8201 return target;
8202}
f18c054f 8203
3a9b8c7e
AH
8204/* Expand the stvx builtins. */
8205static rtx
f676971a 8206altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8207 bool *expandedp)
3a9b8c7e 8208{
5039610b 8209 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8210 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8211 tree arg0, arg1;
8212 enum machine_mode mode0, mode1;
7c3abc73 8213 rtx pat, op0, op1;
3a9b8c7e 8214 enum insn_code icode;
f18c054f 8215
3a9b8c7e
AH
8216 switch (fcode)
8217 {
8218 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 8219 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
8220 break;
8221 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 8222 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
8223 break;
8224 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 8225 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
8226 break;
8227 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 8228 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
8229 break;
8230 default:
8231 *expandedp = false;
8232 return NULL_RTX;
8233 }
24408032 8234
5039610b
SL
8235 arg0 = CALL_EXPR_ARG (exp, 0);
8236 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
8237 op0 = expand_normal (arg0);
8238 op1 = expand_normal (arg1);
3a9b8c7e
AH
8239 mode0 = insn_data[icode].operand[0].mode;
8240 mode1 = insn_data[icode].operand[1].mode;
f18c054f 8241
3a9b8c7e
AH
8242 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8243 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8244 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8245 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 8246
3a9b8c7e
AH
8247 pat = GEN_FCN (icode) (op0, op1);
8248 if (pat)
8249 emit_insn (pat);
f18c054f 8250
3a9b8c7e
AH
8251 *expandedp = true;
8252 return NULL_RTX;
8253}
f18c054f 8254
3a9b8c7e
AH
8255/* Expand the dst builtins. */
8256static rtx
f676971a 8257altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8258 bool *expandedp)
3a9b8c7e 8259{
5039610b 8260 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8261 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8262 tree arg0, arg1, arg2;
8263 enum machine_mode mode0, mode1, mode2;
7c3abc73 8264 rtx pat, op0, op1, op2;
586de218 8265 const struct builtin_description *d;
a3170dc6 8266 size_t i;
f18c054f 8267
3a9b8c7e 8268 *expandedp = false;
f18c054f 8269
3a9b8c7e 8270 /* Handle DST variants. */
586de218 8271 d = bdesc_dst;
3a9b8c7e
AH
8272 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8273 if (d->code == fcode)
8274 {
5039610b
SL
8275 arg0 = CALL_EXPR_ARG (exp, 0);
8276 arg1 = CALL_EXPR_ARG (exp, 1);
8277 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8278 op0 = expand_normal (arg0);
8279 op1 = expand_normal (arg1);
8280 op2 = expand_normal (arg2);
3a9b8c7e
AH
8281 mode0 = insn_data[d->icode].operand[0].mode;
8282 mode1 = insn_data[d->icode].operand[1].mode;
8283 mode2 = insn_data[d->icode].operand[2].mode;
24408032 8284
3a9b8c7e
AH
8285 /* Invalid arguments, bail out before generating bad rtl. */
8286 if (arg0 == error_mark_node
8287 || arg1 == error_mark_node
8288 || arg2 == error_mark_node)
8289 return const0_rtx;
f18c054f 8290
86e7df90 8291 *expandedp = true;
8bb418a3 8292 STRIP_NOPS (arg2);
3a9b8c7e
AH
8293 if (TREE_CODE (arg2) != INTEGER_CST
8294 || TREE_INT_CST_LOW (arg2) & ~0x3)
8295 {
9e637a26 8296 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
8297 return const0_rtx;
8298 }
f18c054f 8299
3a9b8c7e 8300 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 8301 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
8302 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8303 op1 = copy_to_mode_reg (mode1, op1);
24408032 8304
3a9b8c7e
AH
8305 pat = GEN_FCN (d->icode) (op0, op1, op2);
8306 if (pat != 0)
8307 emit_insn (pat);
f18c054f 8308
3a9b8c7e
AH
8309 return NULL_RTX;
8310 }
f18c054f 8311
3a9b8c7e
AH
8312 return NULL_RTX;
8313}
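/* [Editorial sketch, not part of rs6000.c]  The dst variants take an
   address, a control word and a 2-bit tag, and only the tag is required to
   be a literal, which is what the (TREE_INT_CST_LOW (arg2) & ~0x3) test
   enforces.  Assuming -maltivec and illustrative names:

       __builtin_altivec_dst (p, ctl, 0);
       __builtin_altivec_dst (p, ctl, 5);

   The second call trips the 2-bit unsigned literal error above.  */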
24408032 8314
7a4eca66
DE
8315/* Expand vec_init builtin. */
8316static rtx
5039610b 8317altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
8318{
8319 enum machine_mode tmode = TYPE_MODE (type);
8320 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8321 int i, n_elt = GET_MODE_NUNITS (tmode);
8322 rtvec v = rtvec_alloc (n_elt);
8323
8324 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 8325 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8326
5039610b 8327 for (i = 0; i < n_elt; ++i)
7a4eca66 8328 {
5039610b 8329 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
8330 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8331 }
8332
7a4eca66
DE
8333 if (!target || !register_operand (target, tmode))
8334 target = gen_reg_rtx (tmode);
8335
8336 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8337 return target;
8338}
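/* [Editorial note, not part of rs6000.c]  The vec_init builtins take one
   scalar argument per vector element, hence the assertion that n_elt equals
   call_expr_nargs (exp); each argument is expanded, narrowed with
   gen_lowpart and collected into a PARALLEL for rs6000_expand_vector_init.
   A hedged sketch of a direct call, assuming the __builtin_vec_init_v4si
   spelling under which these are registered elsewhere in this file:

       vector int v = __builtin_vec_init_v4si (1, 2, 3, 4);
*/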
8339
8340/* Return the integer constant in ARG. Constrain it to be in the range
8341 of the subparts of VEC_TYPE; issue an error if not. */
8342
8343static int
8344get_element_number (tree vec_type, tree arg)
8345{
8346 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8347
8348 if (!host_integerp (arg, 1)
8349 || (elt = tree_low_cst (arg, 1), elt > max))
8350 {
8351 error ("selector must be an integer constant in the range 0..%wi", max);
8352 return 0;
8353 }
8354
8355 return elt;
8356}
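/* [Editorial note, not part of rs6000.c]  get_element_number is shared by
   the vec_set and vec_ext expanders below: the selector must be a
   compile-time constant no larger than TYPE_VECTOR_SUBPARTS - 1, so a V4SI
   vector accepts only 0..3 and anything else falls back to element 0 after
   the error.  A hedged sketch, assuming the __builtin_vec_ext_v4si spelling
   used when these builtins are registered:

       int x = __builtin_vec_ext_v4si (v, 2);
       int y = __builtin_vec_ext_v4si (v, 9);

   The second call produces "selector must be an integer constant in the
   range 0..3".  */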
8357
8358/* Expand vec_set builtin. */
8359static rtx
5039610b 8360altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
8361{
8362 enum machine_mode tmode, mode1;
8363 tree arg0, arg1, arg2;
8364 int elt;
8365 rtx op0, op1;
8366
5039610b
SL
8367 arg0 = CALL_EXPR_ARG (exp, 0);
8368 arg1 = CALL_EXPR_ARG (exp, 1);
8369 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
8370
8371 tmode = TYPE_MODE (TREE_TYPE (arg0));
8372 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8373 gcc_assert (VECTOR_MODE_P (tmode));
8374
8375 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
8376 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
8377 elt = get_element_number (TREE_TYPE (arg0), arg2);
8378
8379 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8380 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8381
8382 op0 = force_reg (tmode, op0);
8383 op1 = force_reg (mode1, op1);
8384
8385 rs6000_expand_vector_set (op0, op1, elt);
8386
8387 return op0;
8388}
8389
8390/* Expand vec_ext builtin. */
8391static rtx
5039610b 8392altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
8393{
8394 enum machine_mode tmode, mode0;
8395 tree arg0, arg1;
8396 int elt;
8397 rtx op0;
8398
5039610b
SL
8399 arg0 = CALL_EXPR_ARG (exp, 0);
8400 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8401
84217346 8402 op0 = expand_normal (arg0);
7a4eca66
DE
8403 elt = get_element_number (TREE_TYPE (arg0), arg1);
8404
8405 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8406 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8407 gcc_assert (VECTOR_MODE_P (mode0));
8408
8409 op0 = force_reg (mode0, op0);
8410
8411 if (optimize || !target || !register_operand (target, tmode))
8412 target = gen_reg_rtx (tmode);
8413
8414 rs6000_expand_vector_extract (target, op0, elt);
8415
8416 return target;
8417}
8418
3a9b8c7e
AH
8419/* Expand the builtin in EXP and store the result in TARGET. Store
8420 true in *EXPANDEDP if we found a builtin to expand. */
8421static rtx
a2369ed3 8422altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8423{
586de218
KG
8424 const struct builtin_description *d;
8425 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
8426 size_t i;
8427 enum insn_code icode;
5039610b 8428 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
8429 tree arg0;
8430 rtx op0, pat;
8431 enum machine_mode tmode, mode0;
3a9b8c7e 8432 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8433
58646b77
PB
8434 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8435 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8436 {
8437 *expandedp = true;
ea40ba9c 8438 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
8439 return const0_rtx;
8440 }
8441
3a9b8c7e
AH
8442 target = altivec_expand_ld_builtin (exp, target, expandedp);
8443 if (*expandedp)
8444 return target;
0ac081f6 8445
3a9b8c7e
AH
8446 target = altivec_expand_st_builtin (exp, target, expandedp);
8447 if (*expandedp)
8448 return target;
8449
8450 target = altivec_expand_dst_builtin (exp, target, expandedp);
8451 if (*expandedp)
8452 return target;
8453
8454 *expandedp = true;
95385cbb 8455
3a9b8c7e
AH
8456 switch (fcode)
8457 {
6525c0e7 8458 case ALTIVEC_BUILTIN_STVX:
5039610b 8459 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8460 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8461 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8462 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8463 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8464 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8465 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8466 case ALTIVEC_BUILTIN_STVXL:
5039610b 8467 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8468
95385cbb
AH
8469 case ALTIVEC_BUILTIN_MFVSCR:
8470 icode = CODE_FOR_altivec_mfvscr;
8471 tmode = insn_data[icode].operand[0].mode;
8472
8473 if (target == 0
8474 || GET_MODE (target) != tmode
8475 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8476 target = gen_reg_rtx (tmode);
f676971a 8477
95385cbb 8478 pat = GEN_FCN (icode) (target);
0ac081f6
AH
8479 if (! pat)
8480 return 0;
8481 emit_insn (pat);
95385cbb
AH
8482 return target;
8483
8484 case ALTIVEC_BUILTIN_MTVSCR:
8485 icode = CODE_FOR_altivec_mtvscr;
5039610b 8486 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8487 op0 = expand_normal (arg0);
95385cbb
AH
8488 mode0 = insn_data[icode].operand[0].mode;
8489
8490 /* If we got invalid arguments, bail out before generating bad rtl. */
8491 if (arg0 == error_mark_node)
9a171fcd 8492 return const0_rtx;
95385cbb
AH
8493
8494 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8495 op0 = copy_to_mode_reg (mode0, op0);
8496
8497 pat = GEN_FCN (icode) (op0);
8498 if (pat)
8499 emit_insn (pat);
8500 return NULL_RTX;
3a9b8c7e 8501
95385cbb
AH
8502 case ALTIVEC_BUILTIN_DSSALL:
8503 emit_insn (gen_altivec_dssall ());
8504 return NULL_RTX;
8505
8506 case ALTIVEC_BUILTIN_DSS:
8507 icode = CODE_FOR_altivec_dss;
5039610b 8508 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8509 STRIP_NOPS (arg0);
84217346 8510 op0 = expand_normal (arg0);
95385cbb
AH
8511 mode0 = insn_data[icode].operand[0].mode;
8512
8513 /* If we got invalid arguments, bail out before generating bad rtl. */
8514 if (arg0 == error_mark_node)
9a171fcd 8515 return const0_rtx;
95385cbb 8516
b44140e7
AH
8517 if (TREE_CODE (arg0) != INTEGER_CST
8518 || TREE_INT_CST_LOW (arg0) & ~0x3)
8519 {
8520 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8521 return const0_rtx;
b44140e7
AH
8522 }
8523
95385cbb
AH
8524 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8525 op0 = copy_to_mode_reg (mode0, op0);
8526
8527 emit_insn (gen_altivec_dss (op0));
0ac081f6 8528 return NULL_RTX;
7a4eca66
DE
8529
8530 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8531 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8532 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8533 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8534 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8535
8536 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8537 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8538 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8539 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8540 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8541
8542 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8543 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8544 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8545 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8546 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8547
8548 default:
8549 break;
8550 /* Fall through. */
0ac081f6 8551 }
24408032 8552
100c4561 8553 /* Expand abs* operations. */
586de218 8554 d = bdesc_abs;
ca7558fc 8555 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8556 if (d->code == fcode)
5039610b 8557 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8558
ae4b4a02 8559 /* Expand the AltiVec predicates. */
586de218 8560 dp = bdesc_altivec_preds;
ca7558fc 8561 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8562 if (dp->code == fcode)
c4ad648e 8563 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8564 exp, target);
ae4b4a02 8565
6525c0e7
AH
8566 /* LV* are funky. We initialized them differently. */
8567 switch (fcode)
8568 {
8569 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8570 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5039610b 8571 exp, target);
6525c0e7 8572 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8573 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5039610b 8574 exp, target);
6525c0e7 8575 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8576 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5039610b 8577 exp, target);
6525c0e7 8578 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8579 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5039610b 8580 exp, target);
6525c0e7 8581 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8582 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5039610b 8583 exp, target);
6525c0e7 8584 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8585 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5039610b 8586 exp, target);
6525c0e7 8587 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8588 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5039610b 8589 exp, target);
6525c0e7
AH
 8590 default:
 8591 break;
 8592 /* Fall through to the code after the switch. */
8593 }
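  /* Editorial note: the lv* builtins handled above correspond to the
     AltiVec lvsl/lvsr/lve{b,h,w}x/lvx/lvxl instructions; for instance lvx
     loads the 16-byte block containing the effective address, with the low
     four address bits ignored.  The separate handling here reflects how
     these builtins were initialized (see the comment above), not a
     difference in their load semantics.  */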
95385cbb 8594
92898235 8595 *expandedp = false;
0ac081f6
AH
8596 return NULL_RTX;
8597}
8598
96038623
DE
8599/* Expand the builtin in EXP and store the result in TARGET. Store
8600 true in *EXPANDEDP if we found a builtin to expand. */
8601static rtx
8602paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8603{
8604 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8605 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 8606 const struct builtin_description *d;
96038623
DE
8607 size_t i;
8608
8609 *expandedp = true;
8610
8611 switch (fcode)
8612 {
8613 case PAIRED_BUILTIN_STX:
8614 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8615 case PAIRED_BUILTIN_LX:
8616 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
 8617 default:
 8618 break;
 8619 /* Fall through to the predicate table below. */
8620 }
8621
8622 /* Expand the paired predicates. */
23a651fc 8623 d = bdesc_paired_preds;
96038623
DE
8624 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8625 if (d->code == fcode)
8626 return paired_expand_predicate_builtin (d->icode, exp, target);
8627
8628 *expandedp = false;
8629 return NULL_RTX;
8630}
8631
a3170dc6
AH
8632/* Binops that need to be initialized manually, but can be expanded
8633 automagically by rs6000_expand_binop_builtin. */
8634static struct builtin_description bdesc_2arg_spe[] =
8635{
8636 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8637 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8638 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8639 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8640 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8641 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8642 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8643 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8644 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8645 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8646 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8647 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8648 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8649 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8650 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8651 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8652 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8653 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8654 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8655 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8656 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8657 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8658};
8659
8660/* Expand the builtin in EXP and store the result in TARGET. Store
8661 true in *EXPANDEDP if we found a builtin to expand.
8662
8663 This expands the SPE builtins that are not simple unary and binary
8664 operations. */
8665static rtx
a2369ed3 8666spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8667{
5039610b 8668 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8669 tree arg1, arg0;
8670 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8671 enum insn_code icode;
8672 enum machine_mode tmode, mode0;
8673 rtx pat, op0;
8674 struct builtin_description *d;
8675 size_t i;
8676
8677 *expandedp = true;
8678
8679 /* Syntax check for a 5-bit unsigned immediate. */
8680 switch (fcode)
8681 {
8682 case SPE_BUILTIN_EVSTDD:
8683 case SPE_BUILTIN_EVSTDH:
8684 case SPE_BUILTIN_EVSTDW:
8685 case SPE_BUILTIN_EVSTWHE:
8686 case SPE_BUILTIN_EVSTWHO:
8687 case SPE_BUILTIN_EVSTWWE:
8688 case SPE_BUILTIN_EVSTWWO:
5039610b 8689 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8690 if (TREE_CODE (arg1) != INTEGER_CST
8691 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8692 {
8693 error ("argument 2 must be a 5-bit unsigned literal");
8694 return const0_rtx;
8695 }
8696 break;
8697 default:
8698 break;
8699 }
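  /* Editorial note: the check above rejects anything but a literal in the
     range 0..31 in the offset position of these vector store builtins.  A
     minimal usage sketch, assuming the signatures registered in
     spe_init_builtins below:

	 __ev64_opaque__ v, *p;
	 __builtin_spe_evstdd (v, p, 16);

     where 16 is a compile-time constant; a variable or out-of-range third
     argument is diagnosed by the code above.  */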
8700
00332c9f
AH
8701 /* The evsplat*i instructions are not quite generic. */
8702 switch (fcode)
8703 {
8704 case SPE_BUILTIN_EVSPLATFI:
8705 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8706 exp, target);
00332c9f
AH
8707 case SPE_BUILTIN_EVSPLATI:
8708 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8709 exp, target);
00332c9f
AH
8710 default:
8711 break;
8712 }
8713
a3170dc6
AH
8714 d = (struct builtin_description *) bdesc_2arg_spe;
8715 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8716 if (d->code == fcode)
5039610b 8717 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8718
8719 d = (struct builtin_description *) bdesc_spe_predicates;
8720 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8721 if (d->code == fcode)
5039610b 8722 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8723
8724 d = (struct builtin_description *) bdesc_spe_evsel;
8725 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8726 if (d->code == fcode)
5039610b 8727 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8728
8729 switch (fcode)
8730 {
8731 case SPE_BUILTIN_EVSTDDX:
5039610b 8732 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8733 case SPE_BUILTIN_EVSTDHX:
5039610b 8734 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8735 case SPE_BUILTIN_EVSTDWX:
5039610b 8736 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8737 case SPE_BUILTIN_EVSTWHEX:
5039610b 8738 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8739 case SPE_BUILTIN_EVSTWHOX:
5039610b 8740 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8741 case SPE_BUILTIN_EVSTWWEX:
5039610b 8742 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8743 case SPE_BUILTIN_EVSTWWOX:
5039610b 8744 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8745 case SPE_BUILTIN_EVSTDD:
5039610b 8746 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8747 case SPE_BUILTIN_EVSTDH:
5039610b 8748 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8749 case SPE_BUILTIN_EVSTDW:
5039610b 8750 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8751 case SPE_BUILTIN_EVSTWHE:
5039610b 8752 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8753 case SPE_BUILTIN_EVSTWHO:
5039610b 8754 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8755 case SPE_BUILTIN_EVSTWWE:
5039610b 8756 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8757 case SPE_BUILTIN_EVSTWWO:
5039610b 8758 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8759 case SPE_BUILTIN_MFSPEFSCR:
8760 icode = CODE_FOR_spe_mfspefscr;
8761 tmode = insn_data[icode].operand[0].mode;
8762
8763 if (target == 0
8764 || GET_MODE (target) != tmode
8765 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8766 target = gen_reg_rtx (tmode);
f676971a 8767
a3170dc6
AH
8768 pat = GEN_FCN (icode) (target);
8769 if (! pat)
8770 return 0;
8771 emit_insn (pat);
8772 return target;
8773 case SPE_BUILTIN_MTSPEFSCR:
8774 icode = CODE_FOR_spe_mtspefscr;
5039610b 8775 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8776 op0 = expand_normal (arg0);
a3170dc6
AH
8777 mode0 = insn_data[icode].operand[0].mode;
8778
8779 if (arg0 == error_mark_node)
8780 return const0_rtx;
8781
8782 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8783 op0 = copy_to_mode_reg (mode0, op0);
8784
8785 pat = GEN_FCN (icode) (op0);
8786 if (pat)
8787 emit_insn (pat);
8788 return NULL_RTX;
8789 default:
8790 break;
8791 }
8792
8793 *expandedp = false;
8794 return NULL_RTX;
8795}
8796
96038623
DE
8797static rtx
8798paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
8799{
8800 rtx pat, scratch, tmp;
8801 tree form = CALL_EXPR_ARG (exp, 0);
8802 tree arg0 = CALL_EXPR_ARG (exp, 1);
8803 tree arg1 = CALL_EXPR_ARG (exp, 2);
8804 rtx op0 = expand_normal (arg0);
8805 rtx op1 = expand_normal (arg1);
8806 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8807 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8808 int form_int;
8809 enum rtx_code code;
8810
8811 if (TREE_CODE (form) != INTEGER_CST)
8812 {
8813 error ("argument 1 of __builtin_paired_predicate must be a constant");
8814 return const0_rtx;
8815 }
8816 else
8817 form_int = TREE_INT_CST_LOW (form);
8818
8819 gcc_assert (mode0 == mode1);
8820
8821 if (arg0 == error_mark_node || arg1 == error_mark_node)
8822 return const0_rtx;
8823
8824 if (target == 0
8825 || GET_MODE (target) != SImode
8826 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
8827 target = gen_reg_rtx (SImode);
8828 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
8829 op0 = copy_to_mode_reg (mode0, op0);
8830 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
8831 op1 = copy_to_mode_reg (mode1, op1);
8832
8833 scratch = gen_reg_rtx (CCFPmode);
8834
8835 pat = GEN_FCN (icode) (scratch, op0, op1);
8836 if (!pat)
8837 return const0_rtx;
8838
8839 emit_insn (pat);
8840
8841 switch (form_int)
8842 {
8843 /* LT bit. */
8844 case 0:
8845 code = LT;
8846 break;
8847 /* GT bit. */
8848 case 1:
8849 code = GT;
8850 break;
8851 /* EQ bit. */
8852 case 2:
8853 code = EQ;
8854 break;
8855 /* UN bit. */
8856 case 3:
8857 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8858 return target;
8859 default:
8860 error ("argument 1 of __builtin_paired_predicate is out of range");
8861 return const0_rtx;
8862 }
8863
8864 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8865 emit_move_insn (target, tmp);
8866 return target;
8867}
8868
a3170dc6 8869static rtx
5039610b 8870spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8871{
8872 rtx pat, scratch, tmp;
5039610b
SL
8873 tree form = CALL_EXPR_ARG (exp, 0);
8874 tree arg0 = CALL_EXPR_ARG (exp, 1);
8875 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8876 rtx op0 = expand_normal (arg0);
8877 rtx op1 = expand_normal (arg1);
a3170dc6
AH
8878 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8879 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8880 int form_int;
8881 enum rtx_code code;
8882
8883 if (TREE_CODE (form) != INTEGER_CST)
8884 {
8885 error ("argument 1 of __builtin_spe_predicate must be a constant");
8886 return const0_rtx;
8887 }
8888 else
8889 form_int = TREE_INT_CST_LOW (form);
8890
37409796 8891 gcc_assert (mode0 == mode1);
a3170dc6
AH
8892
8893 if (arg0 == error_mark_node || arg1 == error_mark_node)
8894 return const0_rtx;
8895
8896 if (target == 0
8897 || GET_MODE (target) != SImode
8898 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
8899 target = gen_reg_rtx (SImode);
8900
8901 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8902 op0 = copy_to_mode_reg (mode0, op0);
8903 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8904 op1 = copy_to_mode_reg (mode1, op1);
8905
8906 scratch = gen_reg_rtx (CCmode);
8907
8908 pat = GEN_FCN (icode) (scratch, op0, op1);
8909 if (! pat)
8910 return const0_rtx;
8911 emit_insn (pat);
8912
8913 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
8914 _lower_. We use one compare, but look in different bits of the
8915 CR for each variant.
8916
8917 There are 2 elements in each SPE simd type (upper/lower). The CR
8918 bits are set as follows:
8919
8920 BIT0 | BIT 1 | BIT 2 | BIT 3
8921 U | L | (U | L) | (U & L)
8922
8923 So, for an "all" relationship, BIT 3 would be set.
8924 For an "any" relationship, BIT 2 would be set. Etc.
8925
8926 Following traditional nomenclature, these bits map to:
8927
8928 BIT0 | BIT 1 | BIT 2 | BIT 3
8929 LT | GT | EQ | OV
8930
 8931 Later, we will generate rtl to look in the OV/EQ/LT/GT bits.
8932 */
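  /* Editorial worked example (a reading of the table above, not new
     behaviour): for an "all" test both elements must satisfy the relation,
     so we look at BIT 3 = U & L, which sits in the OV position; for an
     "any" test we look at BIT 2 = U | L, in the EQ position; the "upper"
     and "lower" tests read the per-element bits in the LT and GT
     positions.  */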
8933
8934 switch (form_int)
8935 {
8936 /* All variant. OV bit. */
8937 case 0:
8938 /* We need to get to the OV bit, which is the ORDERED bit. We
8939 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 8940 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
8941 So let's just use another pattern. */
8942 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8943 return target;
8944 /* Any variant. EQ bit. */
8945 case 1:
8946 code = EQ;
8947 break;
8948 /* Upper variant. LT bit. */
8949 case 2:
8950 code = LT;
8951 break;
8952 /* Lower variant. GT bit. */
8953 case 3:
8954 code = GT;
8955 break;
8956 default:
8957 error ("argument 1 of __builtin_spe_predicate is out of range");
8958 return const0_rtx;
8959 }
8960
8961 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8962 emit_move_insn (target, tmp);
8963
8964 return target;
8965}
8966
8967/* The evsel builtins look like this:
8968
8969 e = __builtin_spe_evsel_OP (a, b, c, d);
8970
8971 and work like this:
8972
8973 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
8974 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
8975*/
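/* Editorial worked example of the semantics above (illustrative only): with
   a = {1, 5}, b = {3, 2}, c = {10, 20}, d = {30, 40} and OP a signed
   greater-than compare, e = {30, 20}: the upper half takes d[upper] because
   1 > 3 is false, and the lower half takes c[lower] because 5 > 2 is
   true.  */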
8976
8977static rtx
5039610b 8978spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8979{
8980 rtx pat, scratch;
5039610b
SL
8981 tree arg0 = CALL_EXPR_ARG (exp, 0);
8982 tree arg1 = CALL_EXPR_ARG (exp, 1);
8983 tree arg2 = CALL_EXPR_ARG (exp, 2);
8984 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
8985 rtx op0 = expand_normal (arg0);
8986 rtx op1 = expand_normal (arg1);
8987 rtx op2 = expand_normal (arg2);
8988 rtx op3 = expand_normal (arg3);
a3170dc6
AH
8989 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8990 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8991
37409796 8992 gcc_assert (mode0 == mode1);
a3170dc6
AH
8993
8994 if (arg0 == error_mark_node || arg1 == error_mark_node
8995 || arg2 == error_mark_node || arg3 == error_mark_node)
8996 return const0_rtx;
8997
8998 if (target == 0
8999 || GET_MODE (target) != mode0
9000 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
9001 target = gen_reg_rtx (mode0);
9002
9003 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9004 op0 = copy_to_mode_reg (mode0, op0);
9005 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
9006 op1 = copy_to_mode_reg (mode0, op1);
9007 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
9008 op2 = copy_to_mode_reg (mode0, op2);
9009 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
9010 op3 = copy_to_mode_reg (mode0, op3);
9011
9012 /* Generate the compare. */
9013 scratch = gen_reg_rtx (CCmode);
9014 pat = GEN_FCN (icode) (scratch, op0, op1);
9015 if (! pat)
9016 return const0_rtx;
9017 emit_insn (pat);
9018
9019 if (mode0 == V2SImode)
9020 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
9021 else
9022 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
9023
9024 return target;
9025}
9026
0ac081f6
AH
9027/* Expand an expression EXP that calls a built-in function,
9028 with result going to TARGET if that's convenient
9029 (and in mode MODE if that's convenient).
9030 SUBTARGET may be used as the target for computing one of EXP's operands.
9031 IGNORE is nonzero if the value is to be ignored. */
9032
9033static rtx
a2369ed3 9034rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
9035 enum machine_mode mode ATTRIBUTE_UNUSED,
9036 int ignore ATTRIBUTE_UNUSED)
0ac081f6 9037{
5039610b 9038 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 9039 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 9040 const struct builtin_description *d;
92898235
AH
9041 size_t i;
9042 rtx ret;
9043 bool success;
f676971a 9044
9c78b944
DE
9045 if (fcode == RS6000_BUILTIN_RECIP)
9046 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
9047
9048 if (fcode == RS6000_BUILTIN_RECIPF)
9049 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
9050
9051 if (fcode == RS6000_BUILTIN_RSQRTF)
9052 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
9053
7ccf35ed
DN
9054 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
9055 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9056 {
9057 int icode = (int) CODE_FOR_altivec_lvsr;
9058 enum machine_mode tmode = insn_data[icode].operand[0].mode;
9059 enum machine_mode mode = insn_data[icode].operand[1].mode;
9060 tree arg;
9061 rtx op, addr, pat;
9062
37409796 9063 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 9064
5039610b 9065 arg = CALL_EXPR_ARG (exp, 0);
37409796 9066 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
9067 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
9068 addr = memory_address (mode, op);
9069 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9070 op = addr;
9071 else
9072 {
9073 /* For the load case need to negate the address. */
9074 op = gen_reg_rtx (GET_MODE (addr));
9075 emit_insn (gen_rtx_SET (VOIDmode, op,
9076 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 9077 }
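	  /* Editorial note (an interpretation of the code above, not a
	     statement from the sources): lvsr derives its permute control
	     from the low four bits of the address, so feeding it the
	     negated address makes the generated mask depend on 16 minus the
	     misalignment, which appears to be what the vectorizer's
	     realignment scheme wants for loads; stores use the address
	     unmodified.  */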
7ccf35ed
DN
9078 op = gen_rtx_MEM (mode, op);
9079
9080 if (target == 0
9081 || GET_MODE (target) != tmode
9082 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9083 target = gen_reg_rtx (tmode);
9084
9085 /*pat = gen_altivec_lvsr (target, op);*/
9086 pat = GEN_FCN (icode) (target, op);
9087 if (!pat)
9088 return 0;
9089 emit_insn (pat);
9090
9091 return target;
9092 }
5039610b
SL
9093
9094 /* FIXME: There's got to be a nicer way to handle this case than
9095 constructing a new CALL_EXPR. */
f57d17f1
TM
9096 if (fcode == ALTIVEC_BUILTIN_VCFUX
9097 || fcode == ALTIVEC_BUILTIN_VCFSX)
9098 {
5039610b
SL
9099 if (call_expr_nargs (exp) == 1)
9100 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
9101 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 9102 }
7ccf35ed 9103
0ac081f6 9104 if (TARGET_ALTIVEC)
92898235
AH
9105 {
9106 ret = altivec_expand_builtin (exp, target, &success);
9107
a3170dc6
AH
9108 if (success)
9109 return ret;
9110 }
9111 if (TARGET_SPE)
9112 {
9113 ret = spe_expand_builtin (exp, target, &success);
9114
92898235
AH
9115 if (success)
9116 return ret;
9117 }
96038623
DE
9118 if (TARGET_PAIRED_FLOAT)
9119 {
9120 ret = paired_expand_builtin (exp, target, &success);
9121
9122 if (success)
9123 return ret;
9124 }
92898235 9125
96038623 9126 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 9127
37409796
NS
9128 /* Handle simple unary operations. */
9129 d = (struct builtin_description *) bdesc_1arg;
9130 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9131 if (d->code == fcode)
5039610b 9132 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 9133
37409796
NS
9134 /* Handle simple binary operations. */
9135 d = (struct builtin_description *) bdesc_2arg;
9136 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9137 if (d->code == fcode)
5039610b 9138 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 9139
37409796 9140 /* Handle simple ternary operations. */
586de218 9141 d = bdesc_3arg;
37409796
NS
9142 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
9143 if (d->code == fcode)
5039610b 9144 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 9145
37409796 9146 gcc_unreachable ();
0ac081f6
AH
9147}
9148
7c62e993
PB
9149static tree
9150build_opaque_vector_type (tree node, int nunits)
9151{
9152 node = copy_node (node);
9153 TYPE_MAIN_VARIANT (node) = node;
0627bfb4 9154 TYPE_CANONICAL (node) = node;
7c62e993
PB
9155 return build_vector_type (node, nunits);
9156}
9157
0ac081f6 9158static void
863d938c 9159rs6000_init_builtins (void)
0ac081f6 9160{
4a5eab38
PB
9161 V2SI_type_node = build_vector_type (intSI_type_node, 2);
9162 V2SF_type_node = build_vector_type (float_type_node, 2);
9163 V4HI_type_node = build_vector_type (intHI_type_node, 4);
9164 V4SI_type_node = build_vector_type (intSI_type_node, 4);
9165 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 9166 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
9167 V16QI_type_node = build_vector_type (intQI_type_node, 16);
9168
9169 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
9170 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
9171 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
9172
7c62e993
PB
9173 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
9174 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 9175 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 9176 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 9177
8bb418a3
ZL
9178 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
9179 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
9180 'vector unsigned short'. */
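  /* Editorial illustration (user-level, hedged): with the distinct type
     copies built below, code such as

	 vector bool char m;
	 vector unsigned char u;

     keeps m and u as different types even though they share a layout, so
     mixing them without a conversion can be diagnosed; that is the point of
     not reusing the unsigned type nodes directly.  */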
9181
8dd16ecc
NS
9182 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
9183 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
9184 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
9185 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 9186
58646b77
PB
9187 long_integer_type_internal_node = long_integer_type_node;
9188 long_unsigned_type_internal_node = long_unsigned_type_node;
9189 intQI_type_internal_node = intQI_type_node;
9190 uintQI_type_internal_node = unsigned_intQI_type_node;
9191 intHI_type_internal_node = intHI_type_node;
9192 uintHI_type_internal_node = unsigned_intHI_type_node;
9193 intSI_type_internal_node = intSI_type_node;
9194 uintSI_type_internal_node = unsigned_intSI_type_node;
9195 float_type_internal_node = float_type_node;
9196 void_type_internal_node = void_type_node;
9197
8bb418a3
ZL
9198 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9199 get_identifier ("__bool char"),
9200 bool_char_type_node));
9201 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9202 get_identifier ("__bool short"),
9203 bool_short_type_node));
9204 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9205 get_identifier ("__bool int"),
9206 bool_int_type_node));
9207 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9208 get_identifier ("__pixel"),
9209 pixel_type_node));
9210
4a5eab38
PB
9211 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
9212 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
9213 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
9214 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
9215
9216 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9217 get_identifier ("__vector unsigned char"),
9218 unsigned_V16QI_type_node));
9219 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9220 get_identifier ("__vector signed char"),
9221 V16QI_type_node));
9222 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9223 get_identifier ("__vector __bool char"),
9224 bool_V16QI_type_node));
9225
9226 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9227 get_identifier ("__vector unsigned short"),
9228 unsigned_V8HI_type_node));
9229 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9230 get_identifier ("__vector signed short"),
9231 V8HI_type_node));
9232 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9233 get_identifier ("__vector __bool short"),
9234 bool_V8HI_type_node));
9235
9236 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9237 get_identifier ("__vector unsigned int"),
9238 unsigned_V4SI_type_node));
9239 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9240 get_identifier ("__vector signed int"),
9241 V4SI_type_node));
9242 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9243 get_identifier ("__vector __bool int"),
9244 bool_V4SI_type_node));
9245
9246 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9247 get_identifier ("__vector float"),
9248 V4SF_type_node));
9249 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9250 get_identifier ("__vector __pixel"),
9251 pixel_V8HI_type_node));
9252
96038623
DE
9253 if (TARGET_PAIRED_FLOAT)
9254 paired_init_builtins ();
a3170dc6 9255 if (TARGET_SPE)
3fdaa45a 9256 spe_init_builtins ();
0ac081f6
AH
9257 if (TARGET_ALTIVEC)
9258 altivec_init_builtins ();
96038623 9259 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 9260 rs6000_common_init_builtins ();
9c78b944
DE
9261 if (TARGET_PPC_GFXOPT)
9262 {
9263 tree ftype = build_function_type_list (float_type_node,
9264 float_type_node,
9265 float_type_node,
9266 NULL_TREE);
9267 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9268 RS6000_BUILTIN_RECIPF);
9269
9270 ftype = build_function_type_list (float_type_node,
9271 float_type_node,
9272 NULL_TREE);
9273 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9274 RS6000_BUILTIN_RSQRTF);
9275 }
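  /* Editorial note: __builtin_recipdivf and __builtin_rsqrtf registered
     above are a two-operand and a one-operand float builtin respectively;
     per the expander earlier in this file they map to the recipsf3 and
     rsqrtsf2 patterns, i.e. (a hedged reading) a reciprocal-estimate-based
     x / y and 1 / sqrtf (x) for single precision.  */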
9276 if (TARGET_POPCNTB)
9277 {
9278 tree ftype = build_function_type_list (double_type_node,
9279 double_type_node,
9280 double_type_node,
9281 NULL_TREE);
9282 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9283 RS6000_BUILTIN_RECIP);
9284
9285 }
69ca3549
DE
9286
9287#if TARGET_XCOFF
9288 /* AIX libm provides clog as __clog. */
9289 if (built_in_decls [BUILT_IN_CLOG])
9290 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9291#endif
fb220235
FXC
9292
9293#ifdef SUBTARGET_INIT_BUILTINS
9294 SUBTARGET_INIT_BUILTINS;
9295#endif
0ac081f6
AH
9296}
9297
a3170dc6
AH
9298/* Search through a set of builtins and enable the mask bits.
9299 DESC is an array of builtins.
b6d08ca1 9300 SIZE is the total number of builtins.
a3170dc6
AH
9301 START is the builtin enum at which to start.
9302 END is the builtin enum at which to end. */
0ac081f6 9303static void
a2369ed3 9304enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 9305 enum rs6000_builtins start,
a2369ed3 9306 enum rs6000_builtins end)
a3170dc6
AH
9307{
9308 int i;
9309
9310 for (i = 0; i < size; ++i)
9311 if (desc[i].code == start)
9312 break;
9313
9314 if (i == size)
9315 return;
9316
9317 for (; i < size; ++i)
9318 {
9319 /* Flip all the bits on. */
9320 desc[i].mask = target_flags;
9321 if (desc[i].code == end)
9322 break;
9323 }
9324}
9325
9326static void
863d938c 9327spe_init_builtins (void)
0ac081f6 9328{
a3170dc6
AH
9329 tree endlink = void_list_node;
9330 tree puint_type_node = build_pointer_type (unsigned_type_node);
9331 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9332 struct builtin_description *d;
0ac081f6
AH
9333 size_t i;
9334
a3170dc6
AH
9335 tree v2si_ftype_4_v2si
9336 = build_function_type
3fdaa45a
AH
9337 (opaque_V2SI_type_node,
9338 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9339 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9340 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9341 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9342 endlink)))));
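  /* Editorial note: the explicit tree_cons chains in this function build
     the same kind of signature that build_function_type_list builds
     elsewhere in this file; e.g. the type above could equivalently be
     written as

	 build_function_type_list (opaque_V2SI_type_node,
				   opaque_V2SI_type_node, opaque_V2SI_type_node,
				   opaque_V2SI_type_node, opaque_V2SI_type_node,
				   NULL_TREE);

     since terminating the argument list with void_list_node and with
     NULL_TREE both yield a non-varargs function type.  */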
9343
9344 tree v2sf_ftype_4_v2sf
9345 = build_function_type
3fdaa45a
AH
9346 (opaque_V2SF_type_node,
9347 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9348 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9349 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9350 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9351 endlink)))));
9352
9353 tree int_ftype_int_v2si_v2si
9354 = build_function_type
9355 (integer_type_node,
9356 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9357 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9358 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9359 endlink))));
9360
9361 tree int_ftype_int_v2sf_v2sf
9362 = build_function_type
9363 (integer_type_node,
9364 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9365 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9366 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9367 endlink))));
9368
9369 tree void_ftype_v2si_puint_int
9370 = build_function_type (void_type_node,
3fdaa45a 9371 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9372 tree_cons (NULL_TREE, puint_type_node,
9373 tree_cons (NULL_TREE,
9374 integer_type_node,
9375 endlink))));
9376
9377 tree void_ftype_v2si_puint_char
9378 = build_function_type (void_type_node,
3fdaa45a 9379 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9380 tree_cons (NULL_TREE, puint_type_node,
9381 tree_cons (NULL_TREE,
9382 char_type_node,
9383 endlink))));
9384
9385 tree void_ftype_v2si_pv2si_int
9386 = build_function_type (void_type_node,
3fdaa45a 9387 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9388 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9389 tree_cons (NULL_TREE,
9390 integer_type_node,
9391 endlink))));
9392
9393 tree void_ftype_v2si_pv2si_char
9394 = build_function_type (void_type_node,
3fdaa45a 9395 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9396 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9397 tree_cons (NULL_TREE,
9398 char_type_node,
9399 endlink))));
9400
9401 tree void_ftype_int
9402 = build_function_type (void_type_node,
9403 tree_cons (NULL_TREE, integer_type_node, endlink));
9404
9405 tree int_ftype_void
36e8d515 9406 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
9407
9408 tree v2si_ftype_pv2si_int
3fdaa45a 9409 = build_function_type (opaque_V2SI_type_node,
6035d635 9410 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9411 tree_cons (NULL_TREE, integer_type_node,
9412 endlink)));
9413
9414 tree v2si_ftype_puint_int
3fdaa45a 9415 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9416 tree_cons (NULL_TREE, puint_type_node,
9417 tree_cons (NULL_TREE, integer_type_node,
9418 endlink)));
9419
9420 tree v2si_ftype_pushort_int
3fdaa45a 9421 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9422 tree_cons (NULL_TREE, pushort_type_node,
9423 tree_cons (NULL_TREE, integer_type_node,
9424 endlink)));
9425
00332c9f
AH
9426 tree v2si_ftype_signed_char
9427 = build_function_type (opaque_V2SI_type_node,
9428 tree_cons (NULL_TREE, signed_char_type_node,
9429 endlink));
9430
a3170dc6
AH
9431 /* The initialization of the simple binary and unary builtins is
9432 done in rs6000_common_init_builtins, but we have to enable the
9433 mask bits here manually because we have run out of `target_flags'
9434 bits. We really need to redesign this mask business. */
9435
9436 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9437 ARRAY_SIZE (bdesc_2arg),
9438 SPE_BUILTIN_EVADDW,
9439 SPE_BUILTIN_EVXOR);
9440 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9441 ARRAY_SIZE (bdesc_1arg),
9442 SPE_BUILTIN_EVABS,
9443 SPE_BUILTIN_EVSUBFUSIAAW);
9444 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9445 ARRAY_SIZE (bdesc_spe_predicates),
9446 SPE_BUILTIN_EVCMPEQ,
9447 SPE_BUILTIN_EVFSTSTLT);
9448 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9449 ARRAY_SIZE (bdesc_spe_evsel),
9450 SPE_BUILTIN_EVSEL_CMPGTS,
9451 SPE_BUILTIN_EVSEL_FSTSTEQ);
9452
36252949
AH
9453 (*lang_hooks.decls.pushdecl)
9454 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9455 opaque_V2SI_type_node));
9456
a3170dc6 9457 /* Initialize irregular SPE builtins. */
f676971a 9458
a3170dc6
AH
9459 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9460 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9461 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9462 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9463 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9464 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9465 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9466 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9467 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9468 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9469 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9470 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9471 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9472 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9473 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9474 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
9475 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9476 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
9477
9478 /* Loads. */
9479 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9480 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9481 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9482 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9483 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9484 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9485 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9486 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9487 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9488 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9489 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9490 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9491 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9492 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9493 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9494 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9495 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9496 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9497 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9498 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9499 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9500 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9501
9502 /* Predicates. */
9503 d = (struct builtin_description *) bdesc_spe_predicates;
9504 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9505 {
9506 tree type;
9507
9508 switch (insn_data[d->icode].operand[1].mode)
9509 {
9510 case V2SImode:
9511 type = int_ftype_int_v2si_v2si;
9512 break;
9513 case V2SFmode:
9514 type = int_ftype_int_v2sf_v2sf;
9515 break;
9516 default:
37409796 9517 gcc_unreachable ();
a3170dc6
AH
9518 }
9519
9520 def_builtin (d->mask, d->name, type, d->code);
9521 }
9522
9523 /* Evsel predicates. */
9524 d = (struct builtin_description *) bdesc_spe_evsel;
9525 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9526 {
9527 tree type;
9528
9529 switch (insn_data[d->icode].operand[1].mode)
9530 {
9531 case V2SImode:
9532 type = v2si_ftype_4_v2si;
9533 break;
9534 case V2SFmode:
9535 type = v2sf_ftype_4_v2sf;
9536 break;
9537 default:
37409796 9538 gcc_unreachable ();
a3170dc6
AH
9539 }
9540
9541 def_builtin (d->mask, d->name, type, d->code);
9542 }
9543}
9544
96038623
DE
9545static void
9546paired_init_builtins (void)
9547{
23a651fc 9548 const struct builtin_description *d;
96038623
DE
9549 size_t i;
9550 tree endlink = void_list_node;
9551
9552 tree int_ftype_int_v2sf_v2sf
9553 = build_function_type
9554 (integer_type_node,
9555 tree_cons (NULL_TREE, integer_type_node,
9556 tree_cons (NULL_TREE, V2SF_type_node,
9557 tree_cons (NULL_TREE, V2SF_type_node,
9558 endlink))));
9559 tree pcfloat_type_node =
9560 build_pointer_type (build_qualified_type
9561 (float_type_node, TYPE_QUAL_CONST));
9562
9563 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9564 long_integer_type_node,
9565 pcfloat_type_node,
9566 NULL_TREE);
9567 tree void_ftype_v2sf_long_pcfloat =
9568 build_function_type_list (void_type_node,
9569 V2SF_type_node,
9570 long_integer_type_node,
9571 pcfloat_type_node,
9572 NULL_TREE);
9573
9574
9575 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9576 PAIRED_BUILTIN_LX);
9577
9578
9579 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9580 PAIRED_BUILTIN_STX);
9581
9582 /* Predicates. */
23a651fc 9583 d = bdesc_paired_preds;
96038623
DE
9584 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9585 {
9586 tree type;
9587
9588 switch (insn_data[d->icode].operand[1].mode)
9589 {
9590 case V2SFmode:
9591 type = int_ftype_int_v2sf_v2sf;
9592 break;
9593 default:
9594 gcc_unreachable ();
9595 }
9596
9597 def_builtin (d->mask, d->name, type, d->code);
9598 }
9599}
9600
a3170dc6 9601static void
863d938c 9602altivec_init_builtins (void)
a3170dc6 9603{
586de218
KG
9604 const struct builtin_description *d;
9605 const struct builtin_description_predicates *dp;
a3170dc6 9606 size_t i;
7a4eca66
DE
9607 tree ftype;
9608
a3170dc6
AH
9609 tree pfloat_type_node = build_pointer_type (float_type_node);
9610 tree pint_type_node = build_pointer_type (integer_type_node);
9611 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9612 tree pchar_type_node = build_pointer_type (char_type_node);
9613
9614 tree pvoid_type_node = build_pointer_type (void_type_node);
9615
0dbc3651
ZW
9616 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9617 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9618 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9619 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9620
9621 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9622
58646b77
PB
9623 tree int_ftype_opaque
9624 = build_function_type_list (integer_type_node,
9625 opaque_V4SI_type_node, NULL_TREE);
9626
9627 tree opaque_ftype_opaque_int
9628 = build_function_type_list (opaque_V4SI_type_node,
9629 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9630 tree opaque_ftype_opaque_opaque_int
9631 = build_function_type_list (opaque_V4SI_type_node,
9632 opaque_V4SI_type_node, opaque_V4SI_type_node,
9633 integer_type_node, NULL_TREE);
9634 tree int_ftype_int_opaque_opaque
9635 = build_function_type_list (integer_type_node,
9636 integer_type_node, opaque_V4SI_type_node,
9637 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
9638 tree int_ftype_int_v4si_v4si
9639 = build_function_type_list (integer_type_node,
9640 integer_type_node, V4SI_type_node,
9641 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9642 tree v4sf_ftype_pcfloat
9643 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9644 tree void_ftype_pfloat_v4sf
b4de2f7d 9645 = build_function_type_list (void_type_node,
a3170dc6 9646 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
9647 tree v4si_ftype_pcint
9648 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9649 tree void_ftype_pint_v4si
b4de2f7d
AH
9650 = build_function_type_list (void_type_node,
9651 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9652 tree v8hi_ftype_pcshort
9653 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9654 tree void_ftype_pshort_v8hi
b4de2f7d
AH
9655 = build_function_type_list (void_type_node,
9656 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
9657 tree v16qi_ftype_pcchar
9658 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9659 tree void_ftype_pchar_v16qi
b4de2f7d
AH
9660 = build_function_type_list (void_type_node,
9661 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9662 tree void_ftype_v4si
b4de2f7d 9663 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9664 tree v8hi_ftype_void
9665 = build_function_type (V8HI_type_node, void_list_node);
9666 tree void_ftype_void
9667 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
9668 tree void_ftype_int
9669 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9670
58646b77
PB
9671 tree opaque_ftype_long_pcvoid
9672 = build_function_type_list (opaque_V4SI_type_node,
9673 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9674 tree v16qi_ftype_long_pcvoid
a3170dc6 9675 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
9676 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9677 tree v8hi_ftype_long_pcvoid
a3170dc6 9678 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
9679 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9680 tree v4si_ftype_long_pcvoid
a3170dc6 9681 = build_function_type_list (V4SI_type_node,
b4a62fa0 9682 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9683
58646b77
PB
9684 tree void_ftype_opaque_long_pvoid
9685 = build_function_type_list (void_type_node,
9686 opaque_V4SI_type_node, long_integer_type_node,
9687 pvoid_type_node, NULL_TREE);
b4a62fa0 9688 tree void_ftype_v4si_long_pvoid
b4de2f7d 9689 = build_function_type_list (void_type_node,
b4a62fa0 9690 V4SI_type_node, long_integer_type_node,
b4de2f7d 9691 pvoid_type_node, NULL_TREE);
b4a62fa0 9692 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9693 = build_function_type_list (void_type_node,
b4a62fa0 9694 V16QI_type_node, long_integer_type_node,
b4de2f7d 9695 pvoid_type_node, NULL_TREE);
b4a62fa0 9696 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9697 = build_function_type_list (void_type_node,
b4a62fa0 9698 V8HI_type_node, long_integer_type_node,
b4de2f7d 9699 pvoid_type_node, NULL_TREE);
a3170dc6
AH
9700 tree int_ftype_int_v8hi_v8hi
9701 = build_function_type_list (integer_type_node,
9702 integer_type_node, V8HI_type_node,
9703 V8HI_type_node, NULL_TREE);
9704 tree int_ftype_int_v16qi_v16qi
9705 = build_function_type_list (integer_type_node,
9706 integer_type_node, V16QI_type_node,
9707 V16QI_type_node, NULL_TREE);
9708 tree int_ftype_int_v4sf_v4sf
9709 = build_function_type_list (integer_type_node,
9710 integer_type_node, V4SF_type_node,
9711 V4SF_type_node, NULL_TREE);
9712 tree v4si_ftype_v4si
9713 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9714 tree v8hi_ftype_v8hi
9715 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9716 tree v16qi_ftype_v16qi
9717 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9718 tree v4sf_ftype_v4sf
9719 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9720 tree void_ftype_pcvoid_int_int
a3170dc6 9721 = build_function_type_list (void_type_node,
0dbc3651 9722 pcvoid_type_node, integer_type_node,
8bb418a3 9723 integer_type_node, NULL_TREE);
8bb418a3 9724
0dbc3651
ZW
9725 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9726 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9727 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9728 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9729 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9730 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9731 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9732 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9733 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9734 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9735 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9736 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9737 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9738 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9739 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9740 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
9741 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9742 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9743 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9744 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
9745 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9746 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9747 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9748 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9749 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9750 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9751 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9752 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9753 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9754 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9755 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9756 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
9757 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9758 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9759 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9760 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9761 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9762 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9763 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9764 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9765 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9766 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9767 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9768 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9769 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9770 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9771
9772 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
9773
9774 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
9775 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
9776 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
9777 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
9778 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
9779 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
9780 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
9781 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
9782 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
9783 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 9784
a3170dc6 9785 /* Add the DST variants. */
586de218 9786 d = bdesc_dst;
a3170dc6 9787 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 9788 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
9789
9790 /* Initialize the predicates. */
586de218 9791 dp = bdesc_altivec_preds;
a3170dc6
AH
9792 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
9793 {
9794 enum machine_mode mode1;
9795 tree type;
58646b77
PB
9796 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9797 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 9798
58646b77
PB
9799 if (is_overloaded)
9800 mode1 = VOIDmode;
9801 else
9802 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
9803
9804 switch (mode1)
9805 {
58646b77
PB
9806 case VOIDmode:
9807 type = int_ftype_int_opaque_opaque;
9808 break;
a3170dc6
AH
9809 case V4SImode:
9810 type = int_ftype_int_v4si_v4si;
9811 break;
9812 case V8HImode:
9813 type = int_ftype_int_v8hi_v8hi;
9814 break;
9815 case V16QImode:
9816 type = int_ftype_int_v16qi_v16qi;
9817 break;
9818 case V4SFmode:
9819 type = int_ftype_int_v4sf_v4sf;
9820 break;
9821 default:
37409796 9822 gcc_unreachable ();
a3170dc6 9823 }
f676971a 9824
a3170dc6
AH
9825 def_builtin (dp->mask, dp->name, type, dp->code);
9826 }
9827
9828 /* Initialize the abs* operators. */
586de218 9829 d = bdesc_abs;
a3170dc6
AH
9830 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
9831 {
9832 enum machine_mode mode0;
9833 tree type;
9834
9835 mode0 = insn_data[d->icode].operand[0].mode;
9836
9837 switch (mode0)
9838 {
9839 case V4SImode:
9840 type = v4si_ftype_v4si;
9841 break;
9842 case V8HImode:
9843 type = v8hi_ftype_v8hi;
9844 break;
9845 case V16QImode:
9846 type = v16qi_ftype_v16qi;
9847 break;
9848 case V4SFmode:
9849 type = v4sf_ftype_v4sf;
9850 break;
9851 default:
37409796 9852 gcc_unreachable ();
a3170dc6 9853 }
f676971a 9854
a3170dc6
AH
9855 def_builtin (d->mask, d->name, type, d->code);
9856 }
7ccf35ed 9857
13c62176
DN
9858 if (TARGET_ALTIVEC)
9859 {
9860 tree decl;
9861
9862 /* Initialize target builtin that implements
9863 targetm.vectorize.builtin_mask_for_load. */
9864
c79efc4d
RÁE
9865 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
9866 v16qi_ftype_long_pcvoid,
9867 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
9868 BUILT_IN_MD, NULL, NULL_TREE);
9869 TREE_READONLY (decl) = 1;
13c62176
DN
9870 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
9871 altivec_builtin_mask_for_load = decl;
13c62176 9872 }
7a4eca66
DE
9873
9874 /* Access to the vec_init patterns. */
9875 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
9876 integer_type_node, integer_type_node,
9877 integer_type_node, NULL_TREE);
9878 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
9879 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
9880
9881 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
9882 short_integer_type_node,
9883 short_integer_type_node,
9884 short_integer_type_node,
9885 short_integer_type_node,
9886 short_integer_type_node,
9887 short_integer_type_node,
9888 short_integer_type_node, NULL_TREE);
9889 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
9890 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
9891
9892 ftype = build_function_type_list (V16QI_type_node, char_type_node,
9893 char_type_node, char_type_node,
9894 char_type_node, char_type_node,
9895 char_type_node, char_type_node,
9896 char_type_node, char_type_node,
9897 char_type_node, char_type_node,
9898 char_type_node, char_type_node,
9899 char_type_node, char_type_node,
9900 char_type_node, NULL_TREE);
9901 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
9902 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
9903
9904 ftype = build_function_type_list (V4SF_type_node, float_type_node,
9905 float_type_node, float_type_node,
9906 float_type_node, NULL_TREE);
9907 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
9908 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
9909
9910 /* Access to the vec_set patterns. */
9911 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
9912 intSI_type_node,
9913 integer_type_node, NULL_TREE);
9914 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
9915 ALTIVEC_BUILTIN_VEC_SET_V4SI);
9916
9917 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
9918 intHI_type_node,
9919 integer_type_node, NULL_TREE);
9920 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
9921 ALTIVEC_BUILTIN_VEC_SET_V8HI);
9922
9923 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
9924 intQI_type_node,
9925 integer_type_node, NULL_TREE);
9926 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
9927 ALTIVEC_BUILTIN_VEC_SET_V16QI);
9928
9929 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
9930 float_type_node,
9931 integer_type_node, NULL_TREE);
9932 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
9933 ALTIVEC_BUILTIN_VEC_SET_V4SF);
9934
9935 /* Access to the vec_extract patterns. */
9936 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
9937 integer_type_node, NULL_TREE);
9938 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
9939 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
9940
9941 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
9942 integer_type_node, NULL_TREE);
9943 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
9944 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
9945
9946 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
9947 integer_type_node, NULL_TREE);
9948 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
9949 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
9950
9951 ftype = build_function_type_list (float_type_node, V4SF_type_node,
9952 integer_type_node, NULL_TREE);
9953 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
9954 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
a3170dc6
AH
9955}
9956
9957static void
863d938c 9958rs6000_common_init_builtins (void)
a3170dc6 9959{
586de218 9960 const struct builtin_description *d;
a3170dc6
AH
9961 size_t i;
9962
96038623
DE
9963 tree v2sf_ftype_v2sf_v2sf_v2sf
9964 = build_function_type_list (V2SF_type_node,
9965 V2SF_type_node, V2SF_type_node,
9966 V2SF_type_node, NULL_TREE);
9967
a3170dc6
AH
9968 tree v4sf_ftype_v4sf_v4sf_v16qi
9969 = build_function_type_list (V4SF_type_node,
9970 V4SF_type_node, V4SF_type_node,
9971 V16QI_type_node, NULL_TREE);
9972 tree v4si_ftype_v4si_v4si_v16qi
9973 = build_function_type_list (V4SI_type_node,
9974 V4SI_type_node, V4SI_type_node,
9975 V16QI_type_node, NULL_TREE);
9976 tree v8hi_ftype_v8hi_v8hi_v16qi
9977 = build_function_type_list (V8HI_type_node,
9978 V8HI_type_node, V8HI_type_node,
9979 V16QI_type_node, NULL_TREE);
9980 tree v16qi_ftype_v16qi_v16qi_v16qi
9981 = build_function_type_list (V16QI_type_node,
9982 V16QI_type_node, V16QI_type_node,
9983 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
9984 tree v4si_ftype_int
9985 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
9986 tree v8hi_ftype_int
9987 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
9988 tree v16qi_ftype_int
9989 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
9990 tree v8hi_ftype_v16qi
9991 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
9992 tree v4sf_ftype_v4sf
9993 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
9994
9995 tree v2si_ftype_v2si_v2si
2abe3e28
AH
9996 = build_function_type_list (opaque_V2SI_type_node,
9997 opaque_V2SI_type_node,
9998 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9999
96038623 10000 tree v2sf_ftype_v2sf_v2sf_spe
2abe3e28
AH
10001 = build_function_type_list (opaque_V2SF_type_node,
10002 opaque_V2SF_type_node,
10003 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 10004
96038623
DE
10005 tree v2sf_ftype_v2sf_v2sf
10006 = build_function_type_list (V2SF_type_node,
10007 V2SF_type_node,
10008 V2SF_type_node, NULL_TREE);
10009
10010
a3170dc6 10011 tree v2si_ftype_int_int
2abe3e28 10012 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
10013 integer_type_node, integer_type_node,
10014 NULL_TREE);
10015
58646b77
PB
10016 tree opaque_ftype_opaque
10017 = build_function_type_list (opaque_V4SI_type_node,
10018 opaque_V4SI_type_node, NULL_TREE);
10019
a3170dc6 10020 tree v2si_ftype_v2si
2abe3e28
AH
10021 = build_function_type_list (opaque_V2SI_type_node,
10022 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10023
96038623 10024 tree v2sf_ftype_v2sf_spe
2abe3e28
AH
10025 = build_function_type_list (opaque_V2SF_type_node,
10026 opaque_V2SF_type_node, NULL_TREE);
f676971a 10027
96038623
DE
10028 tree v2sf_ftype_v2sf
10029 = build_function_type_list (V2SF_type_node,
10030 V2SF_type_node, NULL_TREE);
10031
a3170dc6 10032 tree v2sf_ftype_v2si
2abe3e28
AH
10033 = build_function_type_list (opaque_V2SF_type_node,
10034 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
10035
10036 tree v2si_ftype_v2sf
2abe3e28
AH
10037 = build_function_type_list (opaque_V2SI_type_node,
10038 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
10039
10040 tree v2si_ftype_v2si_char
2abe3e28
AH
10041 = build_function_type_list (opaque_V2SI_type_node,
10042 opaque_V2SI_type_node,
10043 char_type_node, NULL_TREE);
a3170dc6
AH
10044
10045 tree v2si_ftype_int_char
2abe3e28 10046 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
10047 integer_type_node, char_type_node, NULL_TREE);
10048
10049 tree v2si_ftype_char
2abe3e28
AH
10050 = build_function_type_list (opaque_V2SI_type_node,
10051 char_type_node, NULL_TREE);
a3170dc6
AH
10052
10053 tree int_ftype_int_int
10054 = build_function_type_list (integer_type_node,
10055 integer_type_node, integer_type_node,
10056 NULL_TREE);
95385cbb 10057
58646b77
PB
10058 tree opaque_ftype_opaque_opaque
10059 = build_function_type_list (opaque_V4SI_type_node,
10060 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 10061 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
10062 = build_function_type_list (V4SI_type_node,
10063 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 10064 tree v4sf_ftype_v4si_int
b4de2f7d 10065 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
10066 V4SI_type_node, integer_type_node, NULL_TREE);
10067 tree v4si_ftype_v4sf_int
b4de2f7d 10068 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10069 V4SF_type_node, integer_type_node, NULL_TREE);
10070 tree v4si_ftype_v4si_int
b4de2f7d 10071 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10072 V4SI_type_node, integer_type_node, NULL_TREE);
10073 tree v8hi_ftype_v8hi_int
b4de2f7d 10074 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
10075 V8HI_type_node, integer_type_node, NULL_TREE);
10076 tree v16qi_ftype_v16qi_int
b4de2f7d 10077 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
10078 V16QI_type_node, integer_type_node, NULL_TREE);
10079 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
10080 = build_function_type_list (V16QI_type_node,
10081 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
10082 integer_type_node, NULL_TREE);
10083 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
10084 = build_function_type_list (V8HI_type_node,
10085 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
10086 integer_type_node, NULL_TREE);
10087 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
10088 = build_function_type_list (V4SI_type_node,
10089 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
10090 integer_type_node, NULL_TREE);
10091 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
10092 = build_function_type_list (V4SF_type_node,
10093 V4SF_type_node, V4SF_type_node,
b9e4e5d1 10094 integer_type_node, NULL_TREE);
0ac081f6 10095 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
10096 = build_function_type_list (V4SF_type_node,
10097 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
10098 tree opaque_ftype_opaque_opaque_opaque
10099 = build_function_type_list (opaque_V4SI_type_node,
10100 opaque_V4SI_type_node, opaque_V4SI_type_node,
10101 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 10102 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
10103 = build_function_type_list (V4SF_type_node,
10104 V4SF_type_node, V4SF_type_node,
10105 V4SI_type_node, NULL_TREE);
2212663f 10106 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
10107 = build_function_type_list (V4SF_type_node,
10108 V4SF_type_node, V4SF_type_node,
10109 V4SF_type_node, NULL_TREE);
f676971a 10110 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
10111 = build_function_type_list (V4SI_type_node,
10112 V4SI_type_node, V4SI_type_node,
10113 V4SI_type_node, NULL_TREE);
0ac081f6 10114 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
10115 = build_function_type_list (V8HI_type_node,
10116 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 10117 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
10118 = build_function_type_list (V8HI_type_node,
10119 V8HI_type_node, V8HI_type_node,
10120 V8HI_type_node, NULL_TREE);
c4ad648e 10121 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
10122 = build_function_type_list (V4SI_type_node,
10123 V8HI_type_node, V8HI_type_node,
10124 V4SI_type_node, NULL_TREE);
c4ad648e 10125 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
10126 = build_function_type_list (V4SI_type_node,
10127 V16QI_type_node, V16QI_type_node,
10128 V4SI_type_node, NULL_TREE);
0ac081f6 10129 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
10130 = build_function_type_list (V16QI_type_node,
10131 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10132 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
10133 = build_function_type_list (V4SI_type_node,
10134 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 10135 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
10136 = build_function_type_list (V8HI_type_node,
10137 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10138 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
10139 = build_function_type_list (V4SI_type_node,
10140 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10141 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
10142 = build_function_type_list (V8HI_type_node,
10143 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 10144 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
10145 = build_function_type_list (V16QI_type_node,
10146 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10147 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
10148 = build_function_type_list (V4SI_type_node,
10149 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 10150 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
10151 = build_function_type_list (V4SI_type_node,
10152 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10153 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
10154 = build_function_type_list (V4SI_type_node,
10155 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
10156 tree v4si_ftype_v8hi
10157 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
10158 tree int_ftype_v4si_v4si
10159 = build_function_type_list (integer_type_node,
10160 V4SI_type_node, V4SI_type_node, NULL_TREE);
10161 tree int_ftype_v4sf_v4sf
10162 = build_function_type_list (integer_type_node,
10163 V4SF_type_node, V4SF_type_node, NULL_TREE);
10164 tree int_ftype_v16qi_v16qi
10165 = build_function_type_list (integer_type_node,
10166 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10167 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
10168 = build_function_type_list (integer_type_node,
10169 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10170
6f317ef3 10171 /* Add the simple ternary operators. */
586de218 10172 d = bdesc_3arg;
ca7558fc 10173 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 10174 {
2212663f
DB
10175 enum machine_mode mode0, mode1, mode2, mode3;
10176 tree type;
58646b77
PB
10177 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10178 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 10179
58646b77
PB
10180 if (is_overloaded)
10181 {
10182 mode0 = VOIDmode;
10183 mode1 = VOIDmode;
10184 mode2 = VOIDmode;
10185 mode3 = VOIDmode;
10186 }
10187 else
10188 {
10189 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10190 continue;
f676971a 10191
58646b77
PB
10192 mode0 = insn_data[d->icode].operand[0].mode;
10193 mode1 = insn_data[d->icode].operand[1].mode;
10194 mode2 = insn_data[d->icode].operand[2].mode;
10195 mode3 = insn_data[d->icode].operand[3].mode;
10196 }
bb8df8a6 10197
2212663f
DB
10198 /* When all four are of the same mode. */
10199 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
10200 {
10201 switch (mode0)
10202 {
58646b77
PB
10203 case VOIDmode:
10204 type = opaque_ftype_opaque_opaque_opaque;
10205 break;
617e0e1d
DB
10206 case V4SImode:
10207 type = v4si_ftype_v4si_v4si_v4si;
10208 break;
2212663f
DB
10209 case V4SFmode:
10210 type = v4sf_ftype_v4sf_v4sf_v4sf;
10211 break;
10212 case V8HImode:
10213 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 10214 break;
2212663f
DB
10215 case V16QImode:
10216 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10217 break;
96038623
DE
10218 case V2SFmode:
10219 type = v2sf_ftype_v2sf_v2sf_v2sf;
10220 break;
2212663f 10221 default:
37409796 10222 gcc_unreachable ();
2212663f
DB
10223 }
10224 }
10225 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 10226 {
2212663f
DB
10227 switch (mode0)
10228 {
10229 case V4SImode:
10230 type = v4si_ftype_v4si_v4si_v16qi;
10231 break;
10232 case V4SFmode:
10233 type = v4sf_ftype_v4sf_v4sf_v16qi;
10234 break;
10235 case V8HImode:
10236 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 10237 break;
2212663f
DB
10238 case V16QImode:
10239 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10240 break;
2212663f 10241 default:
37409796 10242 gcc_unreachable ();
2212663f
DB
10243 }
10244 }
f676971a 10245 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 10246 && mode3 == V4SImode)
24408032 10247 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 10248 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 10249 && mode3 == V4SImode)
24408032 10250 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 10251 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 10252 && mode3 == V4SImode)
24408032
AH
10253 type = v4sf_ftype_v4sf_v4sf_v4si;
10254
a7b376ee 10255 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
10256 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10257 && mode3 == QImode)
b9e4e5d1 10258 type = v16qi_ftype_v16qi_v16qi_int;
24408032 10259
a7b376ee 10260 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
10261 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10262 && mode3 == QImode)
b9e4e5d1 10263 type = v8hi_ftype_v8hi_v8hi_int;
24408032 10264
a7b376ee 10265 /* vint, vint, vint, 4-bit literal. */
24408032
AH
10266 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10267 && mode3 == QImode)
b9e4e5d1 10268 type = v4si_ftype_v4si_v4si_int;
24408032 10269
a7b376ee 10270 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
10271 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10272 && mode3 == QImode)
b9e4e5d1 10273 type = v4sf_ftype_v4sf_v4sf_int;
24408032 10274
2212663f 10275 else
37409796 10276 gcc_unreachable ();
2212663f
DB
10277
10278 def_builtin (d->mask, d->name, type, d->code);
10279 }
10280
0ac081f6 10281 /* Add the simple binary operators. */
00b960c7 10282 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 10283 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
10284 {
10285 enum machine_mode mode0, mode1, mode2;
10286 tree type;
58646b77
PB
10287 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10288 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 10289
58646b77
PB
10290 if (is_overloaded)
10291 {
10292 mode0 = VOIDmode;
10293 mode1 = VOIDmode;
10294 mode2 = VOIDmode;
10295 }
10296 else
bb8df8a6 10297 {
58646b77
PB
10298 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10299 continue;
f676971a 10300
58646b77
PB
10301 mode0 = insn_data[d->icode].operand[0].mode;
10302 mode1 = insn_data[d->icode].operand[1].mode;
10303 mode2 = insn_data[d->icode].operand[2].mode;
10304 }
0ac081f6
AH
10305
10306 /* When all three operands are of the same mode. */
10307 if (mode0 == mode1 && mode1 == mode2)
10308 {
10309 switch (mode0)
10310 {
58646b77
PB
10311 case VOIDmode:
10312 type = opaque_ftype_opaque_opaque;
10313 break;
0ac081f6
AH
10314 case V4SFmode:
10315 type = v4sf_ftype_v4sf_v4sf;
10316 break;
10317 case V4SImode:
10318 type = v4si_ftype_v4si_v4si;
10319 break;
10320 case V16QImode:
10321 type = v16qi_ftype_v16qi_v16qi;
10322 break;
10323 case V8HImode:
10324 type = v8hi_ftype_v8hi_v8hi;
10325 break;
a3170dc6
AH
10326 case V2SImode:
10327 type = v2si_ftype_v2si_v2si;
10328 break;
96038623
DE
10329 case V2SFmode:
10330 if (TARGET_PAIRED_FLOAT)
10331 type = v2sf_ftype_v2sf_v2sf;
10332 else
10333 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
10334 break;
10335 case SImode:
10336 type = int_ftype_int_int;
10337 break;
0ac081f6 10338 default:
37409796 10339 gcc_unreachable ();
0ac081f6
AH
10340 }
10341 }
10342
10343 /* A few other combos we really don't want to do manually. */
10344
10345 /* vint, vfloat, vfloat. */
10346 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10347 type = v4si_ftype_v4sf_v4sf;
10348
10349 /* vshort, vchar, vchar. */
10350 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10351 type = v8hi_ftype_v16qi_v16qi;
10352
10353 /* vint, vshort, vshort. */
10354 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10355 type = v4si_ftype_v8hi_v8hi;
10356
10357 /* vshort, vint, vint. */
10358 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10359 type = v8hi_ftype_v4si_v4si;
10360
10361 /* vchar, vshort, vshort. */
10362 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10363 type = v16qi_ftype_v8hi_v8hi;
10364
10365 /* vint, vchar, vint. */
10366 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10367 type = v4si_ftype_v16qi_v4si;
10368
fa066a23
AH
10369 /* vint, vchar, vchar. */
10370 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10371 type = v4si_ftype_v16qi_v16qi;
10372
0ac081f6
AH
10373 /* vint, vshort, vint. */
10374 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10375 type = v4si_ftype_v8hi_v4si;
f676971a 10376
a7b376ee 10377 /* vint, vint, 5-bit literal. */
2212663f 10378 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10379 type = v4si_ftype_v4si_int;
f676971a 10380
a7b376ee 10381 /* vshort, vshort, 5-bit literal. */
2212663f 10382 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 10383 type = v8hi_ftype_v8hi_int;
f676971a 10384
a7b376ee 10385 /* vchar, vchar, 5-bit literal. */
2212663f 10386 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 10387 type = v16qi_ftype_v16qi_int;
0ac081f6 10388
a7b376ee 10389 /* vfloat, vint, 5-bit literal. */
617e0e1d 10390 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10391 type = v4sf_ftype_v4si_int;
f676971a 10392
a7b376ee 10393 /* vint, vfloat, 5-bit literal. */
617e0e1d 10394 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 10395 type = v4si_ftype_v4sf_int;
617e0e1d 10396
a3170dc6
AH
10397 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10398 type = v2si_ftype_int_int;
10399
10400 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10401 type = v2si_ftype_v2si_char;
10402
10403 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10404 type = v2si_ftype_int_char;
10405
37409796 10406 else
0ac081f6 10407 {
37409796
NS
10408 /* int, x, x. */
10409 gcc_assert (mode0 == SImode);
0ac081f6
AH
10410 switch (mode1)
10411 {
10412 case V4SImode:
10413 type = int_ftype_v4si_v4si;
10414 break;
10415 case V4SFmode:
10416 type = int_ftype_v4sf_v4sf;
10417 break;
10418 case V16QImode:
10419 type = int_ftype_v16qi_v16qi;
10420 break;
10421 case V8HImode:
10422 type = int_ftype_v8hi_v8hi;
10423 break;
10424 default:
37409796 10425 gcc_unreachable ();
0ac081f6
AH
10426 }
10427 }
10428
2212663f
DB
10429 def_builtin (d->mask, d->name, type, d->code);
10430 }
24408032 10431
2212663f
DB
10432 /* Add the simple unary operators. */
10433 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10434 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10435 {
10436 enum machine_mode mode0, mode1;
10437 tree type;
58646b77
PB
10438 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10439 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10440
10441 if (is_overloaded)
10442 {
10443 mode0 = VOIDmode;
10444 mode1 = VOIDmode;
10445 }
10446 else
10447 {
10448 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10449 continue;
bb8df8a6 10450
58646b77
PB
10451 mode0 = insn_data[d->icode].operand[0].mode;
10452 mode1 = insn_data[d->icode].operand[1].mode;
10453 }
2212663f
DB
10454
10455 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10456 type = v4si_ftype_int;
2212663f 10457 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10458 type = v8hi_ftype_int;
2212663f 10459 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10460 type = v16qi_ftype_int;
58646b77
PB
10461 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10462 type = opaque_ftype_opaque;
617e0e1d
DB
10463 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10464 type = v4sf_ftype_v4sf;
20e26713
AH
10465 else if (mode0 == V8HImode && mode1 == V16QImode)
10466 type = v8hi_ftype_v16qi;
10467 else if (mode0 == V4SImode && mode1 == V8HImode)
10468 type = v4si_ftype_v8hi;
a3170dc6
AH
10469 else if (mode0 == V2SImode && mode1 == V2SImode)
10470 type = v2si_ftype_v2si;
10471 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10472 {
10473 if (TARGET_PAIRED_FLOAT)
10474 type = v2sf_ftype_v2sf;
10475 else
10476 type = v2sf_ftype_v2sf_spe;
10477 }
a3170dc6
AH
10478 else if (mode0 == V2SFmode && mode1 == V2SImode)
10479 type = v2sf_ftype_v2si;
10480 else if (mode0 == V2SImode && mode1 == V2SFmode)
10481 type = v2si_ftype_v2sf;
10482 else if (mode0 == V2SImode && mode1 == QImode)
10483 type = v2si_ftype_char;
2212663f 10484 else
37409796 10485 gcc_unreachable ();
2212663f 10486
0ac081f6
AH
10487 def_builtin (d->mask, d->name, type, d->code);
10488 }
10489}
10490
c15c90bb
ZW
10491static void
10492rs6000_init_libfuncs (void)
10493{
602ea4d3
JJ
10494 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10495 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10496 {
602ea4d3
JJ
10497 /* AIX library routines for float->int conversion. */
10498 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10499 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10500 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10501 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10502 }
c15c90bb 10503
602ea4d3 10504 if (!TARGET_IEEEQUAD)
98c41d98 10505 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
10506 if (!TARGET_XL_COMPAT)
10507 {
10508 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10509 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10510 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10511 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 10512
17caeff2 10513 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10514 {
10515 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10516 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10517 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10518 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10519 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10520 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10521 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10522
10523 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10524 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10525 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10526 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10527 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10528 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10529 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10530 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10531 }
b26941b4
JM
10532
10533 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10534 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10535 }
10536 else
10537 {
10538 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10539 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10540 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10541 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10542 }
c9034561 10543 else
c15c90bb 10544 {
c9034561 10545 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10546
10547 set_optab_libfunc (add_optab, TFmode, "_q_add");
10548 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10549 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10550 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10551 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10552 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10553 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10554
c9034561
ZW
10555 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10556 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10557 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10558 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10559 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10560 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10561
85363ca0
ZW
10562 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10563 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10564 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10565 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10566 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10567 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10568 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10569 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10570 }
10571}
fba73eb1
DE
10572
10573\f
10574/* Expand a block clear operation, and return 1 if successful. Return 0
10575 if we should let the compiler generate normal code.
10576
10577 operands[0] is the destination
10578 operands[1] is the length
57e84f18 10579 operands[3] is the alignment */
fba73eb1
DE
10580
10581int
10582expand_block_clear (rtx operands[])
10583{
10584 rtx orig_dest = operands[0];
10585 rtx bytes_rtx = operands[1];
57e84f18 10586 rtx align_rtx = operands[3];
5514620a
GK
10587 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10588 HOST_WIDE_INT align;
10589 HOST_WIDE_INT bytes;
fba73eb1
DE
10590 int offset;
10591 int clear_bytes;
5514620a 10592 int clear_step;
fba73eb1
DE
10593
 10594 /* If this is not a fixed size clear, just call memset. */
10595 if (! constp)
10596 return 0;
10597
37409796
NS
10598 /* This must be a fixed size alignment */
10599 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10600 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10601
10602 /* Anything to clear? */
10603 bytes = INTVAL (bytes_rtx);
10604 if (bytes <= 0)
10605 return 1;
10606
5514620a
GK
10607 /* Use the builtin memset after a point, to avoid huge code bloat.
10608 When optimize_size, avoid any significant code bloat; calling
10609 memset is about 4 instructions, so allow for one instruction to
10610 load zero and three to do clearing. */
10611 if (TARGET_ALTIVEC && align >= 128)
10612 clear_step = 16;
10613 else if (TARGET_POWERPC64 && align >= 32)
10614 clear_step = 8;
21d818ff
NF
10615 else if (TARGET_SPE && align >= 64)
10616 clear_step = 8;
5514620a
GK
10617 else
10618 clear_step = 4;
fba73eb1 10619
5514620a
GK
10620 if (optimize_size && bytes > 3 * clear_step)
10621 return 0;
10622 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10623 return 0;
10624
10625 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10626 {
fba73eb1
DE
10627 enum machine_mode mode = BLKmode;
10628 rtx dest;
f676971a 10629
5514620a
GK
10630 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10631 {
10632 clear_bytes = 16;
10633 mode = V4SImode;
10634 }
21d818ff
NF
10635 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10636 {
10637 clear_bytes = 8;
10638 mode = V2SImode;
10639 }
5514620a 10640 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10641 /* 64-bit loads and stores require word-aligned
10642 displacements. */
10643 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10644 {
10645 clear_bytes = 8;
10646 mode = DImode;
fba73eb1 10647 }
5514620a 10648 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
10649 { /* move 4 bytes */
10650 clear_bytes = 4;
10651 mode = SImode;
fba73eb1 10652 }
ec53fc93 10653 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
10654 { /* move 2 bytes */
10655 clear_bytes = 2;
10656 mode = HImode;
fba73eb1
DE
10657 }
10658 else /* move 1 byte at a time */
10659 {
10660 clear_bytes = 1;
10661 mode = QImode;
fba73eb1 10662 }
f676971a 10663
fba73eb1 10664 dest = adjust_address (orig_dest, mode, offset);
f676971a 10665
5514620a 10666 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10667 }
10668
10669 return 1;
10670}
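/* Worked illustration of the sizing heuristic in expand_block_clear above
   (hand-derived from the code; a sketch, not authoritative documentation):

     - 24 bytes, 4-byte aligned, 32-bit target without AltiVec/SPE:
       clear_step = 4, so the inline limit is 8 * 4 = 32 bytes and the
       loop emits six SImode stores of zero.  Under optimize_size the
       limit drops to 3 * 4 = 12 bytes, so the expander returns 0 and the
       caller falls back to a memset call.

     - 32 bytes, 16-byte aligned, with AltiVec: clear_step = 16 and the
       loop emits two V4SImode stores of the zero vector.  */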
10671
35aff10b 10672\f
7e69e155
MM
10673/* Expand a block move operation, and return 1 if successful. Return 0
10674 if we should let the compiler generate normal code.
10675
10676 operands[0] is the destination
10677 operands[1] is the source
10678 operands[2] is the length
10679 operands[3] is the alignment */
10680
3933e0e1
MM
10681#define MAX_MOVE_REG 4
10682
7e69e155 10683int
a2369ed3 10684expand_block_move (rtx operands[])
7e69e155 10685{
b6c9286a
MM
10686 rtx orig_dest = operands[0];
10687 rtx orig_src = operands[1];
7e69e155 10688 rtx bytes_rtx = operands[2];
7e69e155 10689 rtx align_rtx = operands[3];
3933e0e1 10690 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10691 int align;
3933e0e1
MM
10692 int bytes;
10693 int offset;
7e69e155 10694 int move_bytes;
cabfd258
GK
10695 rtx stores[MAX_MOVE_REG];
10696 int num_reg = 0;
7e69e155 10697
3933e0e1 10698 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 10699 if (! constp)
3933e0e1
MM
10700 return 0;
10701
37409796
NS
10702 /* This must be a fixed size alignment */
10703 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10704 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10705
7e69e155 10706 /* Anything to move? */
3933e0e1
MM
10707 bytes = INTVAL (bytes_rtx);
10708 if (bytes <= 0)
7e69e155
MM
10709 return 1;
10710
ea9982a8 10711 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10712 reg_parm_stack_space. */
ea9982a8 10713 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10714 return 0;
10715
cabfd258 10716 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10717 {
cabfd258 10718 union {
70128ad9 10719 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10720 rtx (*mov) (rtx, rtx);
cabfd258
GK
10721 } gen_func;
10722 enum machine_mode mode = BLKmode;
10723 rtx src, dest;
f676971a 10724
5514620a
GK
10725 /* Altivec first, since it will be faster than a string move
10726 when it applies, and usually not significantly larger. */
10727 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10728 {
10729 move_bytes = 16;
10730 mode = V4SImode;
10731 gen_func.mov = gen_movv4si;
10732 }
21d818ff
NF
10733 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10734 {
10735 move_bytes = 8;
10736 mode = V2SImode;
10737 gen_func.mov = gen_movv2si;
10738 }
5514620a 10739 else if (TARGET_STRING
cabfd258
GK
10740 && bytes > 24 /* move up to 32 bytes at a time */
10741 && ! fixed_regs[5]
10742 && ! fixed_regs[6]
10743 && ! fixed_regs[7]
10744 && ! fixed_regs[8]
10745 && ! fixed_regs[9]
10746 && ! fixed_regs[10]
10747 && ! fixed_regs[11]
10748 && ! fixed_regs[12])
7e69e155 10749 {
cabfd258 10750 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 10751 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
10752 }
10753 else if (TARGET_STRING
10754 && bytes > 16 /* move up to 24 bytes at a time */
10755 && ! fixed_regs[5]
10756 && ! fixed_regs[6]
10757 && ! fixed_regs[7]
10758 && ! fixed_regs[8]
10759 && ! fixed_regs[9]
10760 && ! fixed_regs[10])
10761 {
10762 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 10763 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
10764 }
10765 else if (TARGET_STRING
10766 && bytes > 8 /* move up to 16 bytes at a time */
10767 && ! fixed_regs[5]
10768 && ! fixed_regs[6]
10769 && ! fixed_regs[7]
10770 && ! fixed_regs[8])
10771 {
10772 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 10773 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
10774 }
10775 else if (bytes >= 8 && TARGET_POWERPC64
10776 /* 64-bit loads and stores require word-aligned
10777 displacements. */
fba73eb1 10778 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
10779 {
10780 move_bytes = 8;
10781 mode = DImode;
10782 gen_func.mov = gen_movdi;
10783 }
10784 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
10785 { /* move up to 8 bytes at a time */
10786 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 10787 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 10788 }
cd7d9ca4 10789 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
10790 { /* move 4 bytes */
10791 move_bytes = 4;
10792 mode = SImode;
10793 gen_func.mov = gen_movsi;
10794 }
ec53fc93 10795 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
10796 { /* move 2 bytes */
10797 move_bytes = 2;
10798 mode = HImode;
10799 gen_func.mov = gen_movhi;
10800 }
10801 else if (TARGET_STRING && bytes > 1)
10802 { /* move up to 4 bytes at a time */
10803 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 10804 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
10805 }
10806 else /* move 1 byte at a time */
10807 {
10808 move_bytes = 1;
10809 mode = QImode;
10810 gen_func.mov = gen_movqi;
10811 }
f676971a 10812
cabfd258
GK
10813 src = adjust_address (orig_src, mode, offset);
10814 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
10815
10816 if (mode != BLKmode)
cabfd258
GK
10817 {
10818 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 10819
cabfd258
GK
10820 emit_insn ((*gen_func.mov) (tmp_reg, src));
10821 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 10822 }
3933e0e1 10823
cabfd258
GK
10824 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
10825 {
10826 int i;
10827 for (i = 0; i < num_reg; i++)
10828 emit_insn (stores[i]);
10829 num_reg = 0;
10830 }
35aff10b 10831
cabfd258 10832 if (mode == BLKmode)
7e69e155 10833 {
70128ad9 10834 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
10835 patterns require zero offset. */
10836 if (!REG_P (XEXP (src, 0)))
b6c9286a 10837 {
cabfd258
GK
10838 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
10839 src = replace_equiv_address (src, src_reg);
b6c9286a 10840 }
cabfd258 10841 set_mem_size (src, GEN_INT (move_bytes));
f676971a 10842
cabfd258 10843 if (!REG_P (XEXP (dest, 0)))
3933e0e1 10844 {
cabfd258
GK
10845 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
10846 dest = replace_equiv_address (dest, dest_reg);
7e69e155 10847 }
cabfd258 10848 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 10849
70128ad9 10850 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
10851 GEN_INT (move_bytes & 31),
10852 align_rtx));
7e69e155 10853 }
7e69e155
MM
10854 }
10855
10856 return 1;
10857}
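/* Worked illustration of the dispatch in expand_block_move above
   (hand-derived from the code; illustrative only):

     - 40 bytes on a 32-bit target: 40 > 32, so the expander returns 0
       and the caller emits a call to memcpy.

     - 16 bytes, 16-byte aligned, with AltiVec: the first arm fires,
       move_bytes = 16, and one V4SImode load/store pair through a
       temporary vector register is queued in stores[].

     - 30 bytes with -mstring and r5..r12 available: the movmemsi_8reg
       pattern is used once with move_bytes = 30.  */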
10858
d62294f5 10859\f
9caa3eb2
DE
10860/* Return a string to perform a load_multiple operation.
10861 operands[0] is the vector.
10862 operands[1] is the source address.
10863 operands[2] is the first destination register. */
10864
10865const char *
a2369ed3 10866rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
10867{
10868 /* We have to handle the case where the pseudo used to contain the address
10869 is assigned to one of the output registers. */
10870 int i, j;
10871 int words = XVECLEN (operands[0], 0);
10872 rtx xop[10];
10873
10874 if (XVECLEN (operands[0], 0) == 1)
10875 return "{l|lwz} %2,0(%1)";
10876
10877 for (i = 0; i < words; i++)
10878 if (refers_to_regno_p (REGNO (operands[2]) + i,
10879 REGNO (operands[2]) + i + 1, operands[1], 0))
10880 {
10881 if (i == words-1)
10882 {
10883 xop[0] = GEN_INT (4 * (words-1));
10884 xop[1] = operands[1];
10885 xop[2] = operands[2];
10886 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
10887 return "";
10888 }
10889 else if (i == 0)
10890 {
10891 xop[0] = GEN_INT (4 * (words-1));
10892 xop[1] = operands[1];
10893 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
10894 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
10895 return "";
10896 }
10897 else
10898 {
10899 for (j = 0; j < words; j++)
10900 if (j != i)
10901 {
10902 xop[0] = GEN_INT (j * 4);
10903 xop[1] = operands[1];
10904 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
10905 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
10906 }
10907 xop[0] = GEN_INT (i * 4);
10908 xop[1] = operands[1];
10909 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
10910 return "";
10911 }
10912 }
10913
10914 return "{lsi|lswi} %2,%1,%N0";
10915}
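/* Illustration of the overlap handling above (derived by hand from the
   code; a sketch only): if the address register in operands[1] is also the
   last register of the destination vector, the first words-1 registers are
   loaded with a single lswi and the final word is then loaded into the
   address register itself with lwz at offset 4*(words-1).  If it is the
   first destination register, the address is bumped by 4 first, the
   remaining registers are loaded with lswi, and the first word is loaded
   last into the address register from offset -4.  */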
10916
9878760c 10917\f
a4f6c312
SS
10918/* A validation routine: say whether CODE, a condition code, and MODE
10919 match. The other alternatives either don't make sense or should
10920 never be generated. */
39a10a29 10921
48d72335 10922void
a2369ed3 10923validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 10924{
37409796
NS
10925 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
10926 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
10927 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
10928
10929 /* These don't make sense. */
37409796
NS
10930 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
10931 || mode != CCUNSmode);
39a10a29 10932
37409796
NS
10933 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
10934 || mode == CCUNSmode);
39a10a29 10935
37409796
NS
10936 gcc_assert (mode == CCFPmode
10937 || (code != ORDERED && code != UNORDERED
10938 && code != UNEQ && code != LTGT
10939 && code != UNGT && code != UNLT
10940 && code != UNGE && code != UNLE));
f676971a
EC
10941
10942 /* These should never be generated except for
bc9ec0e0 10943 flag_finite_math_only. */
37409796
NS
10944 gcc_assert (mode != CCFPmode
10945 || flag_finite_math_only
10946 || (code != LE && code != GE
10947 && code != UNEQ && code != LTGT
10948 && code != UNGT && code != UNLT));
39a10a29
GK
10949
10950 /* These are invalid; the information is not there. */
37409796 10951 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
10952}
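/* For example (hand-derived from the assertions above): an unsigned
   comparison such as GTU or LTU is only valid with CCUNSmode, the signed
   GT/LT/GE/LE forms must not use CCUNSmode, and with CCEQmode the only
   information available is EQ vs. NE.  */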
10953
9878760c
RK
10954\f
10955/* Return 1 if ANDOP is a mask that has no bits set that are not in the
10956 mask required to convert the result of a rotate insn into a shift
10957 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9878760c
RK
10958
10959int
a2369ed3 10960includes_lshift_p (rtx shiftop, rtx andop)
9878760c 10961{
e2c953b6
DE
10962 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
10963
10964 shift_mask <<= INTVAL (shiftop);
9878760c 10965
b1765bde 10966 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
10967}
10968
10969/* Similar, but for right shift. */
10970
10971int
a2369ed3 10972includes_rshift_p (rtx shiftop, rtx andop)
9878760c 10973{
a7653a2c 10974 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
10975
10976 shift_mask >>= INTVAL (shiftop);
10977
b1765bde 10978 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
10979}
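/* Worked example for includes_lshift_p above (hand-checked; illustrative
   only): with SHIFTOP = 4 the shift mask is ~0 << 4, i.e. 0x...fffffff0,
   so ANDOP = 0xfffffff0 or 0xffffff00 returns 1, while ANDOP = 0xffffffff
   returns 0 because its low four bits could never come out of a 4-bit
   left shift.  includes_rshift_p applies the mirrored test with the mask
   shifted right instead of left.  */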
10980
c5059423
AM
10981/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
10982 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 10983 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
10984
10985int
a2369ed3 10986includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 10987{
c5059423
AM
10988 if (GET_CODE (andop) == CONST_INT)
10989 {
02071907 10990 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 10991
c5059423 10992 c = INTVAL (andop);
02071907 10993 if (c == 0 || c == ~0)
c5059423 10994 return 0;
e2c953b6 10995
02071907 10996 shift_mask = ~0;
c5059423
AM
10997 shift_mask <<= INTVAL (shiftop);
10998
b6d08ca1 10999 /* Find the least significant one bit. */
c5059423
AM
11000 lsb = c & -c;
11001
11002 /* It must coincide with the LSB of the shift mask. */
11003 if (-lsb != shift_mask)
11004 return 0;
e2c953b6 11005
c5059423
AM
11006 /* Invert to look for the next transition (if any). */
11007 c = ~c;
11008
11009 /* Remove the low group of ones (originally low group of zeros). */
11010 c &= -lsb;
11011
11012 /* Again find the lsb, and check we have all 1's above. */
11013 lsb = c & -c;
11014 return c == -lsb;
11015 }
11016 else if (GET_CODE (andop) == CONST_DOUBLE
11017 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11018 {
02071907
AM
11019 HOST_WIDE_INT low, high, lsb;
11020 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
11021
11022 low = CONST_DOUBLE_LOW (andop);
11023 if (HOST_BITS_PER_WIDE_INT < 64)
11024 high = CONST_DOUBLE_HIGH (andop);
11025
11026 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 11027 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
11028 return 0;
11029
11030 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11031 {
02071907 11032 shift_mask_high = ~0;
c5059423
AM
11033 if (INTVAL (shiftop) > 32)
11034 shift_mask_high <<= INTVAL (shiftop) - 32;
11035
11036 lsb = high & -high;
11037
11038 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
11039 return 0;
11040
11041 high = ~high;
11042 high &= -lsb;
11043
11044 lsb = high & -high;
11045 return high == -lsb;
11046 }
11047
02071907 11048 shift_mask_low = ~0;
c5059423
AM
11049 shift_mask_low <<= INTVAL (shiftop);
11050
11051 lsb = low & -low;
11052
11053 if (-lsb != shift_mask_low)
11054 return 0;
11055
11056 if (HOST_BITS_PER_WIDE_INT < 64)
11057 high = ~high;
11058 low = ~low;
11059 low &= -lsb;
11060
11061 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11062 {
11063 lsb = high & -high;
11064 return high == -lsb;
11065 }
11066
11067 lsb = low & -low;
11068 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
11069 }
11070 else
11071 return 0;
11072}
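/* Note on the idiom used above (hand-checked; illustrative only): c & -c
   isolates the least significant set bit of c, and -lsb is then a mask of
   all ones from that bit upwards.  For example, ANDOP = 0xff00 with
   SHIFTOP = 8 is accepted: the lowest 1 sits exactly at bit 8, and after
   inverting c and clearing the low run of ones the remainder is again of
   the form -lsb, so the mask has the 0...0 1...1 0...0 shape rldic needs.
   ANDOP = 0xff0f with SHIFTOP = 8 is rejected because its lowest 1 is at
   bit 0, below the shift count.  */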
e2c953b6 11073
c5059423
AM
11074/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
11075 to perform a left shift. It must have SHIFTOP or more least
c1207243 11076 significant 0's, with the remainder of the word 1's. */
e2c953b6 11077
c5059423 11078int
a2369ed3 11079includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 11080{
e2c953b6 11081 if (GET_CODE (andop) == CONST_INT)
c5059423 11082 {
02071907 11083 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 11084
02071907 11085 shift_mask = ~0;
c5059423
AM
11086 shift_mask <<= INTVAL (shiftop);
11087 c = INTVAL (andop);
11088
c1207243 11089 /* Find the least significant one bit. */
c5059423
AM
11090 lsb = c & -c;
11091
11092 /* It must be covered by the shift mask.
a4f6c312 11093 This test also rejects c == 0. */
c5059423
AM
11094 if ((lsb & shift_mask) == 0)
11095 return 0;
11096
11097 /* Check we have all 1's above the transition, and reject all 1's. */
11098 return c == -lsb && lsb != 1;
11099 }
11100 else if (GET_CODE (andop) == CONST_DOUBLE
11101 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11102 {
02071907 11103 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
11104
11105 low = CONST_DOUBLE_LOW (andop);
11106
11107 if (HOST_BITS_PER_WIDE_INT < 64)
11108 {
02071907 11109 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
11110
11111 high = CONST_DOUBLE_HIGH (andop);
11112
11113 if (low == 0)
11114 {
02071907 11115 shift_mask_high = ~0;
c5059423
AM
11116 if (INTVAL (shiftop) > 32)
11117 shift_mask_high <<= INTVAL (shiftop) - 32;
11118
11119 lsb = high & -high;
11120
11121 if ((lsb & shift_mask_high) == 0)
11122 return 0;
11123
11124 return high == -lsb;
11125 }
11126 if (high != ~0)
11127 return 0;
11128 }
11129
02071907 11130 shift_mask_low = ~0;
c5059423
AM
11131 shift_mask_low <<= INTVAL (shiftop);
11132
11133 lsb = low & -low;
11134
11135 if ((lsb & shift_mask_low) == 0)
11136 return 0;
11137
11138 return low == -lsb && lsb != 1;
11139 }
e2c953b6 11140 else
c5059423 11141 return 0;
9878760c 11142}
35068b43 11143
11ac38b2
DE
11144/* Return 1 if the operands will generate valid arguments to an rlwimi
11145instruction for an insert with right shift in 64-bit mode. The mask may
11146not start on the first bit or stop on the last bit because the wrap-around
11147effects of the instruction do not correspond to the semantics of the RTL insn. */
11148
11149int
11150insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
11151{
429ec7dc
DE
11152 if (INTVAL (startop) > 32
11153 && INTVAL (startop) < 64
11154 && INTVAL (sizeop) > 1
11155 && INTVAL (sizeop) + INTVAL (startop) < 64
11156 && INTVAL (shiftop) > 0
11157 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
11158 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
11159 return 1;
11160
11161 return 0;
11162}
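/* Example values for the predicate above (hand-checked; illustrative
   only): SIZEOP = 8, STARTOP = 40, SHIFTOP = 8 satisfies every test
   (40 > 32, 40 < 64, 8 + 40 < 64, 8 + 8 < 32, 64 - 8 >= 8) and returns 1,
   while STARTOP = 32 fails the first test (it must be strictly greater
   than 32) and returns 0.  */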
11163
35068b43 11164/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 11165 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
11166
11167int
a2369ed3 11168registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
11169{
11170 /* We might have been passed a SUBREG. */
f676971a 11171 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 11172 return 0;
f676971a 11173
90f81f99
AP
 11174 /* We might have been passed non-floating-point registers. */
11175 if (!FP_REGNO_P (REGNO (reg1))
11176 || !FP_REGNO_P (REGNO (reg2)))
11177 return 0;
35068b43
RK
11178
11179 return (REGNO (reg1) == REGNO (reg2) - 1);
11180}
11181
a4f6c312
SS
11182/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
11183 addr1 and addr2 must be in consecutive memory locations
11184 (addr2 == addr1 + 8). */
35068b43
RK
11185
11186int
90f81f99 11187mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 11188{
90f81f99 11189 rtx addr1, addr2;
bb8df8a6
EC
11190 unsigned int reg1, reg2;
11191 int offset1, offset2;
35068b43 11192
90f81f99
AP
11193 /* The mems cannot be volatile. */
11194 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
11195 return 0;
f676971a 11196
90f81f99
AP
11197 addr1 = XEXP (mem1, 0);
11198 addr2 = XEXP (mem2, 0);
11199
35068b43
RK
11200 /* Extract an offset (if used) from the first addr. */
11201 if (GET_CODE (addr1) == PLUS)
11202 {
11203 /* If not a REG, return zero. */
11204 if (GET_CODE (XEXP (addr1, 0)) != REG)
11205 return 0;
11206 else
11207 {
c4ad648e 11208 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
11209 /* The offset must be constant! */
11210 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
11211 return 0;
11212 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
11213 }
11214 }
11215 else if (GET_CODE (addr1) != REG)
11216 return 0;
11217 else
11218 {
11219 reg1 = REGNO (addr1);
11220 /* This was a simple (mem (reg)) expression. Offset is 0. */
11221 offset1 = 0;
11222 }
11223
bb8df8a6
EC
11224 /* And now for the second addr. */
11225 if (GET_CODE (addr2) == PLUS)
11226 {
11227 /* If not a REG, return zero. */
11228 if (GET_CODE (XEXP (addr2, 0)) != REG)
11229 return 0;
11230 else
11231 {
11232 reg2 = REGNO (XEXP (addr2, 0));
11233 /* The offset must be constant. */
11234 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11235 return 0;
11236 offset2 = INTVAL (XEXP (addr2, 1));
11237 }
11238 }
11239 else if (GET_CODE (addr2) != REG)
35068b43 11240 return 0;
bb8df8a6
EC
11241 else
11242 {
11243 reg2 = REGNO (addr2);
11244 /* This was a simple (mem (reg)) expression. Offset is 0. */
11245 offset2 = 0;
11246 }
35068b43 11247
bb8df8a6
EC
11248 /* Both of these must have the same base register. */
11249 if (reg1 != reg2)
35068b43
RK
11250 return 0;
11251
11252 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 11253 if (offset2 != offset1 + 8)
35068b43
RK
11254 return 0;
11255
11256 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11257 instructions. */
11258 return 1;
11259}
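/* Example (hand-derived from the checks above; illustrative only): two
   non-volatile mems whose addresses are (plus (reg 31) (const_int 16))
   and (plus (reg 31) (const_int 24)) pass -- same base register, and the
   second offset is exactly 8 more than the first -- while offsets of 16
   and 32 would fail the final check.  */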
9878760c 11260\f
e41b2a33
PB
11261
11262rtx
11263rs6000_secondary_memory_needed_rtx (enum machine_mode mode)
11264{
11265 static bool eliminated = false;
11266 if (mode != SDmode)
11267 return assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
11268 else
11269 {
11270 rtx mem = cfun->machine->sdmode_stack_slot;
11271 gcc_assert (mem != NULL_RTX);
11272
11273 if (!eliminated)
11274 {
11275 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
11276 cfun->machine->sdmode_stack_slot = mem;
11277 eliminated = true;
11278 }
11279 return mem;
11280 }
11281}
11282
11283static tree
11284rs6000_check_sdmode (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11285{
11286 /* Don't walk into types. */
11287 if (*tp == NULL_TREE || *tp == error_mark_node || TYPE_P (*tp))
11288 {
11289 *walk_subtrees = 0;
11290 return NULL_TREE;
11291 }
11292
11293 switch (TREE_CODE (*tp))
11294 {
11295 case VAR_DECL:
11296 case PARM_DECL:
11297 case FIELD_DECL:
11298 case RESULT_DECL:
11299 case REAL_CST:
fdf4f148 11300 case INDIRECT_REF:
a0f39282
JJ
11301 case ALIGN_INDIRECT_REF:
11302 case MISALIGNED_INDIRECT_REF:
fdf4f148 11303 case VIEW_CONVERT_EXPR:
e41b2a33
PB
11304 if (TYPE_MODE (TREE_TYPE (*tp)) == SDmode)
11305 return *tp;
11306 break;
11307 default:
11308 break;
11309 }
11310
11311 return NULL_TREE;
11312}
11313
11314
11315/* Allocate a 64-bit stack slot to be used for copying SDmode
11316 values through if this function has any SDmode references. */
11317
11318static void
11319rs6000_alloc_sdmode_stack_slot (void)
11320{
11321 tree t;
11322 basic_block bb;
11323 block_stmt_iterator bsi;
11324
11325 gcc_assert (cfun->machine->sdmode_stack_slot == NULL_RTX);
11326
11327 FOR_EACH_BB (bb)
11328 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
11329 {
11330 tree ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
11331 rs6000_check_sdmode, NULL);
11332 if (ret)
11333 {
11334 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11335 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11336 SDmode, 0);
11337 return;
11338 }
11339 }
11340
11341 /* Check for any SDmode parameters of the function. */
11342 for (t = DECL_ARGUMENTS (cfun->decl); t; t = TREE_CHAIN (t))
11343 {
11344 if (TREE_TYPE (t) == error_mark_node)
11345 continue;
11346
11347 if (TYPE_MODE (TREE_TYPE (t)) == SDmode
11348 || TYPE_MODE (DECL_ARG_TYPE (t)) == SDmode)
11349 {
11350 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11351 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11352 SDmode, 0);
11353 return;
11354 }
11355 }
11356}
11357
11358static void
11359rs6000_instantiate_decls (void)
11360{
11361 if (cfun->machine->sdmode_stack_slot != NULL_RTX)
11362 instantiate_decl_rtl (cfun->machine->sdmode_stack_slot);
11363}
11364
9878760c
RK
11365/* Return the register class of a scratch register needed to copy IN into
11366 or out of a register in CLASS in MODE. If it can be done directly,
11367 NO_REGS is returned. */
11368
11369enum reg_class
3c4774e0
R
11370rs6000_secondary_reload_class (enum reg_class class,
11371 enum machine_mode mode ATTRIBUTE_UNUSED,
11372 rtx in)
9878760c 11373{
5accd822 11374 int regno;
9878760c 11375
ab82a49f
AP
11376 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
11377#if TARGET_MACHO
c4ad648e 11378 && MACHOPIC_INDIRECT
ab82a49f 11379#endif
c4ad648e 11380 ))
46fad5b7
DJ
11381 {
11382 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
11383 other than BASE_REGS for TARGET_ELF. So indicate that a
11384 register from BASE_REGS is needed as an intermediate
11385 register.
f676971a 11386
46fad5b7
DJ
11387 On Darwin, pic addresses require a load from memory, which
11388 needs a base register. */
11389 if (class != BASE_REGS
c4ad648e
AM
11390 && (GET_CODE (in) == SYMBOL_REF
11391 || GET_CODE (in) == HIGH
11392 || GET_CODE (in) == LABEL_REF
11393 || GET_CODE (in) == CONST))
11394 return BASE_REGS;
46fad5b7 11395 }
e7b7998a 11396
5accd822
DE
11397 if (GET_CODE (in) == REG)
11398 {
11399 regno = REGNO (in);
11400 if (regno >= FIRST_PSEUDO_REGISTER)
11401 {
11402 regno = true_regnum (in);
11403 if (regno >= FIRST_PSEUDO_REGISTER)
11404 regno = -1;
11405 }
11406 }
11407 else if (GET_CODE (in) == SUBREG)
11408 {
11409 regno = true_regnum (in);
11410 if (regno >= FIRST_PSEUDO_REGISTER)
11411 regno = -1;
11412 }
11413 else
11414 regno = -1;
11415
9878760c
RK
11416 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
11417 into anything. */
11418 if (class == GENERAL_REGS || class == BASE_REGS
11419 || (regno >= 0 && INT_REGNO_P (regno)))
11420 return NO_REGS;
11421
11422 /* Constants, memory, and FP registers can go into FP registers. */
11423 if ((regno == -1 || FP_REGNO_P (regno))
11424 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
e41b2a33 11425 return (mode != SDmode) ? NO_REGS : GENERAL_REGS;
9878760c 11426
0ac081f6
AH
11427 /* Memory, and AltiVec registers can go into AltiVec registers. */
11428 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
11429 && class == ALTIVEC_REGS)
11430 return NO_REGS;
11431
9878760c
RK
11432 /* We can copy among the CR registers. */
11433 if ((class == CR_REGS || class == CR0_REGS)
11434 && regno >= 0 && CR_REGNO_P (regno))
11435 return NO_REGS;
11436
11437 /* Otherwise, we need GENERAL_REGS. */
11438 return GENERAL_REGS;
11439}
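/* Example of the logic above (hand-derived; illustrative only): on
   TARGET_ELF, copying a SYMBOL_REF into FLOAT_REGS or ALTIVEC_REGS
   returns BASE_REGS, since the address must first be formed in a base
   register; copying ordinary memory into FLOAT_REGS needs no scratch
   (NO_REGS) except for SDmode, where a GENERAL_REGS intermediate is
   requested.  */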
11440\f
11441/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 11442 know this is a valid comparison.
9878760c
RK
11443
11444 SCC_P is 1 if this is for an scc. That means that %D will have been
11445 used instead of %C, so the bits will be in different places.
11446
b4ac57ab 11447 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
11448
11449int
a2369ed3 11450ccr_bit (rtx op, int scc_p)
9878760c
RK
11451{
11452 enum rtx_code code = GET_CODE (op);
11453 enum machine_mode cc_mode;
11454 int cc_regnum;
11455 int base_bit;
9ebbca7d 11456 rtx reg;
9878760c 11457
ec8e098d 11458 if (!COMPARISON_P (op))
9878760c
RK
11459 return -1;
11460
9ebbca7d
GK
11461 reg = XEXP (op, 0);
11462
37409796 11463 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
11464
11465 cc_mode = GET_MODE (reg);
11466 cc_regnum = REGNO (reg);
11467 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 11468
39a10a29 11469 validate_condition_mode (code, cc_mode);
c5defebb 11470
b7053a3f
GK
11471 /* When generating a sCOND operation, only positive conditions are
11472 allowed. */
37409796
NS
11473 gcc_assert (!scc_p
11474 || code == EQ || code == GT || code == LT || code == UNORDERED
11475 || code == GTU || code == LTU);
f676971a 11476
9878760c
RK
11477 switch (code)
11478 {
11479 case NE:
11480 return scc_p ? base_bit + 3 : base_bit + 2;
11481 case EQ:
11482 return base_bit + 2;
1c882ea4 11483 case GT: case GTU: case UNLE:
9878760c 11484 return base_bit + 1;
1c882ea4 11485 case LT: case LTU: case UNGE:
9878760c 11486 return base_bit;
1c882ea4
GK
11487 case ORDERED: case UNORDERED:
11488 return base_bit + 3;
9878760c
RK
11489
11490 case GE: case GEU:
39a10a29 11491 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
11492 unordered position. So test that bit. For integer, this is ! LT
11493 unless this is an scc insn. */
39a10a29 11494 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
11495
11496 case LE: case LEU:
39a10a29 11497 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 11498
9878760c 11499 default:
37409796 11500 gcc_unreachable ();
9878760c
RK
11501 }
11502}
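/* Example of the bit numbering above (hand-derived; illustrative only):
   base_bit is 4 * the CR field number, so a non-scc GT test on CR field 2
   returns bit 9, EQ returns bit 10, and an scc GE returns bit 11 because
   the preceding cror has placed the result in the unordered slot.  */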
1ff7789b 11503\f
8d30c4ee 11504/* Return the GOT register. */
1ff7789b 11505
9390387d 11506rtx
a2369ed3 11507rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 11508{
a4f6c312
SS
11509 /* The second flow pass currently (June 1999) can't update
11510 regs_ever_live without disturbing other parts of the compiler, so
11511 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
11512 if (!can_create_pseudo_p ()
11513 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 11514 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 11515
e3b5732b 11516 crtl->uses_pic_offset_table = 1;
3cb999d8 11517
1ff7789b
MM
11518 return pic_offset_table_rtx;
11519}
a7df97e6 11520\f
e2500fed
GK
11521/* Function to init struct machine_function.
11522 This will be called, via a pointer variable,
11523 from push_function_context. */
a7df97e6 11524
e2500fed 11525static struct machine_function *
863d938c 11526rs6000_init_machine_status (void)
a7df97e6 11527{
5ead67f6 11528 return GGC_CNEW (machine_function);
a7df97e6 11529}
9878760c 11530\f
0ba1b2ff
AM
11531/* These macros test for integers and extract the low-order bits. */
11532#define INT_P(X) \
11533((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11534 && GET_MODE (X) == VOIDmode)
11535
11536#define INT_LOWPART(X) \
11537 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11538
11539int
a2369ed3 11540extract_MB (rtx op)
0ba1b2ff
AM
11541{
11542 int i;
11543 unsigned long val = INT_LOWPART (op);
11544
11545 /* If the high bit is zero, the value is the first 1 bit we find
11546 from the left. */
11547 if ((val & 0x80000000) == 0)
11548 {
37409796 11549 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11550
11551 i = 1;
11552 while (((val <<= 1) & 0x80000000) == 0)
11553 ++i;
11554 return i;
11555 }
11556
11557 /* If the high bit is set and the low bit is not, or the mask is all
11558 1's, the value is zero. */
11559 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11560 return 0;
11561
11562 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11563 from the right. */
11564 i = 31;
11565 while (((val >>= 1) & 1) != 0)
11566 --i;
11567
11568 return i;
11569}
11570
11571int
a2369ed3 11572extract_ME (rtx op)
0ba1b2ff
AM
11573{
11574 int i;
11575 unsigned long val = INT_LOWPART (op);
11576
11577 /* If the low bit is zero, the value is the first 1 bit we find from
11578 the right. */
11579 if ((val & 1) == 0)
11580 {
37409796 11581 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11582
11583 i = 30;
11584 while (((val >>= 1) & 1) == 0)
11585 --i;
11586
11587 return i;
11588 }
11589
11590 /* If the low bit is set and the high bit is not, or the mask is all
11591 1's, the value is 31. */
11592 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11593 return 31;
11594
11595 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11596 from the left. */
11597 i = 0;
11598 while (((val <<= 1) & 0x80000000) != 0)
11599 ++i;
11600
11601 return i;
11602}
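/* Worked examples (hand-checked; illustrative only), using the rlwinm
   convention in which bit 0 is the most significant bit of the word:
     mask 0x00000ff0               -> extract_MB = 20, extract_ME = 27
     mask 0xf000000f (wrap-around) -> extract_MB = 28, extract_ME = 3  */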
11603
c4501e62
JJ
11604/* Locate some local-dynamic symbol still in use by this function
11605 so that we can print its name in some tls_ld pattern. */
11606
11607static const char *
863d938c 11608rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
11609{
11610 rtx insn;
11611
11612 if (cfun->machine->some_ld_name)
11613 return cfun->machine->some_ld_name;
11614
11615 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11616 if (INSN_P (insn)
11617 && for_each_rtx (&PATTERN (insn),
11618 rs6000_get_some_local_dynamic_name_1, 0))
11619 return cfun->machine->some_ld_name;
11620
37409796 11621 gcc_unreachable ();
c4501e62
JJ
11622}
11623
11624/* Helper function for rs6000_get_some_local_dynamic_name. */
11625
11626static int
a2369ed3 11627rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
11628{
11629 rtx x = *px;
11630
11631 if (GET_CODE (x) == SYMBOL_REF)
11632 {
11633 const char *str = XSTR (x, 0);
11634 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11635 {
11636 cfun->machine->some_ld_name = str;
11637 return 1;
11638 }
11639 }
11640
11641 return 0;
11642}
11643
85b776df
AM
11644/* Write out a function code label. */
11645
11646void
11647rs6000_output_function_entry (FILE *file, const char *fname)
11648{
11649 if (fname[0] != '.')
11650 {
11651 switch (DEFAULT_ABI)
11652 {
11653 default:
37409796 11654 gcc_unreachable ();
85b776df
AM
11655
11656 case ABI_AIX:
11657 if (DOT_SYMBOLS)
11658 putc ('.', file);
11659 else
11660 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11661 break;
11662
11663 case ABI_V4:
11664 case ABI_DARWIN:
11665 break;
11666 }
11667 }
11668 if (TARGET_AIX)
11669 RS6000_OUTPUT_BASENAME (file, fname);
11670 else
11671 assemble_name (file, fname);
11672}
11673
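A minimal sketch (not part of rs6000.c) of the naming convention handled above, assuming the plain DOT_SYMBOLS case and ignoring the -mcall-aixdesc "L." prefix and assembler name mangling: on AIX-style ABIs the code entry point is the dot-prefixed symbol, while the undotted name refers to the function descriptor.

#include <stdio.h>

/* Print the label used for the function's code entry point.  */
static void entry_label (const char *fname, int aix_dot_symbols)
{
  if (fname[0] != '.' && aix_dot_symbols)
    putchar ('.');                /* AIX/powerpc64: ".foo" is the text entry */
  printf ("%s\n", fname);
}

int main (void)
{
  entry_label ("foo", 1);   /* prints ".foo" (descriptor-based ABI) */
  entry_label ("foo", 0);   /* prints "foo"  (System V / Darwin)    */
  return 0;
}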
9878760c
RK
11674/* Print an operand. Recognize special options, documented below. */
11675
38c1f2d7 11676#if TARGET_ELF
d9407988 11677#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11678#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
11679#else
11680#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11681#define SMALL_DATA_REG 0
ba5e43aa
MM
11682#endif
11683
9878760c 11684void
a2369ed3 11685print_operand (FILE *file, rtx x, int code)
9878760c
RK
11686{
11687 int i;
a260abc9 11688 HOST_WIDE_INT val;
0ba1b2ff 11689 unsigned HOST_WIDE_INT uval;
9878760c
RK
11690
11691 switch (code)
11692 {
a8b3aeda 11693 case '.':
a85d226b
RK
11694 /* Write out an instruction after the call which may be replaced
11695 with glue code by the loader. This depends on the AIX version. */
11696 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
11697 return;
11698
81eace42
GK
11699 /* %a is output_address. */
11700
9854d9ed
RK
11701 case 'A':
11702 /* If X is a constant integer whose low-order 5 bits are zero,
11703 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11704 in the AIX assembler where "sri" with a zero shift count
20e26713 11705 writes a trash instruction. */
9854d9ed 11706 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11707 putc ('l', file);
9854d9ed 11708 else
76229ac8 11709 putc ('r', file);
9854d9ed
RK
11710 return;
11711
11712 case 'b':
e2c953b6
DE
11713 /* If constant, low-order 16 bits of constant, unsigned.
11714 Otherwise, write normally. */
11715 if (INT_P (x))
11716 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11717 else
11718 print_operand (file, x, 0);
cad12a8d
RK
11719 return;
11720
a260abc9
DE
11721 case 'B':
11722 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11723 for 64-bit mask direction. */
9390387d 11724 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11725 return;
a260abc9 11726
81eace42
GK
11727 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11728 output_operand. */
11729
423c1189
AH
11730 case 'c':
11731 /* X is a CR register. Print the number of the GT bit of the CR. */
11732 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
 11733	  output_operand_lossage ("invalid %%c value");
11734 else
11735 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11736 return;
11737
11738 case 'D':
cef6b86c 11739 /* Like 'J' but get to the GT bit only. */
37409796 11740 gcc_assert (GET_CODE (x) == REG);
423c1189 11741
cef6b86c
EB
11742 /* Bit 1 is GT bit. */
11743 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 11744
cef6b86c
EB
11745 /* Add one for shift count in rlinm for scc. */
11746 fprintf (file, "%d", i + 1);
423c1189
AH
11747 return;
11748
9854d9ed 11749 case 'E':
39a10a29 11750 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
11751 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11752 output_operand_lossage ("invalid %%E value");
78fbdbf7 11753 else
39a10a29 11754 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 11755 return;
9854d9ed
RK
11756
11757 case 'f':
11758 /* X is a CR register. Print the shift count needed to move it
11759 to the high-order four bits. */
11760 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11761 output_operand_lossage ("invalid %%f value");
11762 else
9ebbca7d 11763 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11764 return;
11765
11766 case 'F':
11767 /* Similar, but print the count for the rotate in the opposite
11768 direction. */
11769 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11770 output_operand_lossage ("invalid %%F value");
11771 else
9ebbca7d 11772 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11773 return;
11774
11775 case 'G':
11776 /* X is a constant integer. If it is negative, print "m",
43aa4e05 11777 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
11778 if (GET_CODE (x) != CONST_INT)
11779 output_operand_lossage ("invalid %%G value");
11780 else if (INTVAL (x) >= 0)
76229ac8 11781 putc ('z', file);
9854d9ed 11782 else
76229ac8 11783 putc ('m', file);
9854d9ed 11784 return;
e2c953b6 11785
9878760c 11786 case 'h':
a4f6c312
SS
11787 /* If constant, output low-order five bits. Otherwise, write
11788 normally. */
9878760c 11789 if (INT_P (x))
5f59ecb7 11790 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
11791 else
11792 print_operand (file, x, 0);
11793 return;
11794
64305719 11795 case 'H':
a4f6c312
SS
11796 /* If constant, output low-order six bits. Otherwise, write
11797 normally. */
64305719 11798 if (INT_P (x))
5f59ecb7 11799 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
11800 else
11801 print_operand (file, x, 0);
11802 return;
11803
9854d9ed
RK
11804 case 'I':
11805 /* Print `i' if this is a constant, else nothing. */
9878760c 11806 if (INT_P (x))
76229ac8 11807 putc ('i', file);
9878760c
RK
11808 return;
11809
9854d9ed
RK
11810 case 'j':
11811 /* Write the bit number in CCR for jump. */
11812 i = ccr_bit (x, 0);
11813 if (i == -1)
11814 output_operand_lossage ("invalid %%j code");
9878760c 11815 else
9854d9ed 11816 fprintf (file, "%d", i);
9878760c
RK
11817 return;
11818
9854d9ed
RK
11819 case 'J':
11820 /* Similar, but add one for shift count in rlinm for scc and pass
11821 scc flag to `ccr_bit'. */
11822 i = ccr_bit (x, 1);
11823 if (i == -1)
11824 output_operand_lossage ("invalid %%J code");
11825 else
a0466a68
RK
11826 /* If we want bit 31, write a shift count of zero, not 32. */
11827 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
11828 return;
11829
9854d9ed
RK
11830 case 'k':
11831 /* X must be a constant. Write the 1's complement of the
11832 constant. */
9878760c 11833 if (! INT_P (x))
9854d9ed 11834 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
11835 else
11836 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
11837 return;
11838
81eace42 11839 case 'K':
9ebbca7d
GK
11840 /* X must be a symbolic constant on ELF. Write an
11841 expression suitable for an 'addi' that adds in the low 16
11842 bits of the MEM. */
11843 if (GET_CODE (x) != CONST)
11844 {
11845 print_operand_address (file, x);
11846 fputs ("@l", file);
11847 }
11848 else
11849 {
11850 if (GET_CODE (XEXP (x, 0)) != PLUS
11851 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
11852 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
11853 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 11854 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
11855 print_operand_address (file, XEXP (XEXP (x, 0), 0));
11856 fputs ("@l", file);
ed8d2920
MM
11857 /* For GNU as, there must be a non-alphanumeric character
11858 between 'l' and the number. The '-' is added by
11859 print_operand() already. */
11860 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
11861 fputs ("+", file);
9ebbca7d
GK
11862 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
11863 }
81eace42
GK
11864 return;
11865
11866 /* %l is output_asm_label. */
9ebbca7d 11867
9854d9ed
RK
11868 case 'L':
11869 /* Write second word of DImode or DFmode reference. Works on register
11870 or non-indexed memory only. */
11871 if (GET_CODE (x) == REG)
fb5c67a7 11872 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
11873 else if (GET_CODE (x) == MEM)
11874 {
11875 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 11876 we have already done it, we can just use an offset of word. */
9854d9ed
RK
11877 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11878 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
11879 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11880 UNITS_PER_WORD));
6fb5fa3c
DB
11881 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11882 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11883 UNITS_PER_WORD));
9854d9ed 11884 else
d7624dc0
RK
11885 output_address (XEXP (adjust_address_nv (x, SImode,
11886 UNITS_PER_WORD),
11887 0));
ed8908e7 11888
ba5e43aa 11889 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11890 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11891 reg_names[SMALL_DATA_REG]);
9854d9ed 11892 }
9878760c 11893 return;
f676971a 11894
9878760c
RK
11895 case 'm':
11896 /* MB value for a mask operand. */
b1765bde 11897 if (! mask_operand (x, SImode))
9878760c
RK
11898 output_operand_lossage ("invalid %%m value");
11899
0ba1b2ff 11900 fprintf (file, "%d", extract_MB (x));
9878760c
RK
11901 return;
11902
11903 case 'M':
11904 /* ME value for a mask operand. */
b1765bde 11905 if (! mask_operand (x, SImode))
a260abc9 11906 output_operand_lossage ("invalid %%M value");
9878760c 11907
0ba1b2ff 11908 fprintf (file, "%d", extract_ME (x));
9878760c
RK
11909 return;
11910
81eace42
GK
11911 /* %n outputs the negative of its operand. */
11912
9878760c
RK
11913 case 'N':
11914 /* Write the number of elements in the vector times 4. */
11915 if (GET_CODE (x) != PARALLEL)
11916 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
11917 else
11918 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
11919 return;
11920
11921 case 'O':
11922 /* Similar, but subtract 1 first. */
11923 if (GET_CODE (x) != PARALLEL)
1427100a 11924 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
11925 else
11926 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
11927 return;
11928
9854d9ed
RK
11929 case 'p':
11930 /* X is a CONST_INT that is a power of two. Output the logarithm. */
11931 if (! INT_P (x)
2bfcf297 11932 || INT_LOWPART (x) < 0
9854d9ed
RK
11933 || (i = exact_log2 (INT_LOWPART (x))) < 0)
11934 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
11935 else
11936 fprintf (file, "%d", i);
9854d9ed
RK
11937 return;
11938
9878760c
RK
11939 case 'P':
11940 /* The operand must be an indirect memory reference. The result
8bb418a3 11941 is the register name. */
9878760c
RK
11942 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
11943 || REGNO (XEXP (x, 0)) >= 32)
11944 output_operand_lossage ("invalid %%P value");
e2c953b6 11945 else
fb5c67a7 11946 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
11947 return;
11948
dfbdccdb
GK
11949 case 'q':
11950 /* This outputs the logical code corresponding to a boolean
11951 expression. The expression may have one or both operands
39a10a29 11952 negated (if one, only the first one). For condition register
c4ad648e
AM
11953 logical operations, it will also treat the negated
11954 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 11955 {
63bc1d05 11956 const char *const *t = 0;
dfbdccdb
GK
11957 const char *s;
11958 enum rtx_code code = GET_CODE (x);
11959 static const char * const tbl[3][3] = {
11960 { "and", "andc", "nor" },
11961 { "or", "orc", "nand" },
11962 { "xor", "eqv", "xor" } };
11963
11964 if (code == AND)
11965 t = tbl[0];
11966 else if (code == IOR)
11967 t = tbl[1];
11968 else if (code == XOR)
11969 t = tbl[2];
11970 else
11971 output_operand_lossage ("invalid %%q value");
11972
11973 if (GET_CODE (XEXP (x, 0)) != NOT)
11974 s = t[0];
11975 else
11976 {
11977 if (GET_CODE (XEXP (x, 1)) == NOT)
11978 s = t[2];
11979 else
11980 s = t[1];
11981 }
f676971a 11982
dfbdccdb
GK
11983 fputs (s, file);
11984 }
11985 return;
11986
2c4a9cff
DE
11987 case 'Q':
11988 if (TARGET_MFCRF)
3b6ce0af 11989 fputc (',', file);
5efb1046 11990 /* FALLTHRU */
2c4a9cff
DE
11991 else
11992 return;
11993
9854d9ed
RK
11994 case 'R':
11995 /* X is a CR register. Print the mask for `mtcrf'. */
11996 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11997 output_operand_lossage ("invalid %%R value");
11998 else
9ebbca7d 11999 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 12000 return;
9854d9ed
RK
12001
12002 case 's':
12003 /* Low 5 bits of 32 - value */
12004 if (! INT_P (x))
12005 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
12006 else
12007 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 12008 return;
9854d9ed 12009
a260abc9 12010 case 'S':
0ba1b2ff 12011 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
12012 CONST_INT 32-bit mask is considered sign-extended so any
12013 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 12014 if (! mask64_operand (x, DImode))
a260abc9
DE
12015 output_operand_lossage ("invalid %%S value");
12016
0ba1b2ff 12017 uval = INT_LOWPART (x);
a260abc9 12018
0ba1b2ff 12019 if (uval & 1) /* Clear Left */
a260abc9 12020 {
f099d360
GK
12021#if HOST_BITS_PER_WIDE_INT > 64
12022 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12023#endif
0ba1b2ff 12024 i = 64;
a260abc9 12025 }
0ba1b2ff 12026 else /* Clear Right */
a260abc9 12027 {
0ba1b2ff 12028 uval = ~uval;
f099d360
GK
12029#if HOST_BITS_PER_WIDE_INT > 64
12030 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12031#endif
0ba1b2ff 12032 i = 63;
a260abc9 12033 }
0ba1b2ff
AM
12034 while (uval != 0)
12035 --i, uval >>= 1;
37409796 12036 gcc_assert (i >= 0);
0ba1b2ff
AM
12037 fprintf (file, "%d", i);
12038 return;
a260abc9 12039
a3170dc6
AH
12040 case 't':
12041 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 12042 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
12043
12044 /* Bit 3 is OV bit. */
12045 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
12046
12047 /* If we want bit 31, write a shift count of zero, not 32. */
12048 fprintf (file, "%d", i == 31 ? 0 : i + 1);
12049 return;
12050
cccf3bdc
DE
12051 case 'T':
12052 /* Print the symbolic name of a branch target register. */
1de43f85
DE
12053 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
12054 && REGNO (x) != CTR_REGNO))
cccf3bdc 12055 output_operand_lossage ("invalid %%T value");
1de43f85 12056 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
12057 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
12058 else
12059 fputs ("ctr", file);
12060 return;
12061
9854d9ed 12062 case 'u':
802a0058 12063 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
12064 if (! INT_P (x))
12065 output_operand_lossage ("invalid %%u value");
e2c953b6 12066 else
f676971a 12067 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 12068 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
12069 return;
12070
802a0058
MM
12071 case 'v':
12072 /* High-order 16 bits of constant for use in signed operand. */
12073 if (! INT_P (x))
12074 output_operand_lossage ("invalid %%v value");
e2c953b6 12075 else
134c32f6
DE
12076 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
12077 (INT_LOWPART (x) >> 16) & 0xffff);
12078 return;
802a0058 12079
9854d9ed
RK
12080 case 'U':
12081 /* Print `u' if this has an auto-increment or auto-decrement. */
12082 if (GET_CODE (x) == MEM
12083 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
12084 || GET_CODE (XEXP (x, 0)) == PRE_DEC
12085 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 12086 putc ('u', file);
9854d9ed 12087 return;
9878760c 12088
e0cd0770
JC
12089 case 'V':
12090 /* Print the trap code for this operand. */
12091 switch (GET_CODE (x))
12092 {
12093 case EQ:
12094 fputs ("eq", file); /* 4 */
12095 break;
12096 case NE:
12097 fputs ("ne", file); /* 24 */
12098 break;
12099 case LT:
12100 fputs ("lt", file); /* 16 */
12101 break;
12102 case LE:
12103 fputs ("le", file); /* 20 */
12104 break;
12105 case GT:
12106 fputs ("gt", file); /* 8 */
12107 break;
12108 case GE:
12109 fputs ("ge", file); /* 12 */
12110 break;
12111 case LTU:
12112 fputs ("llt", file); /* 2 */
12113 break;
12114 case LEU:
12115 fputs ("lle", file); /* 6 */
12116 break;
12117 case GTU:
12118 fputs ("lgt", file); /* 1 */
12119 break;
12120 case GEU:
12121 fputs ("lge", file); /* 5 */
12122 break;
12123 default:
37409796 12124 gcc_unreachable ();
e0cd0770
JC
12125 }
12126 break;
12127
9854d9ed
RK
12128 case 'w':
12129 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
12130 normally. */
12131 if (INT_P (x))
f676971a 12132 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 12133 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
12134 else
12135 print_operand (file, x, 0);
9878760c
RK
12136 return;
12137
9854d9ed 12138 case 'W':
e2c953b6 12139 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
12140 val = (GET_CODE (x) == CONST_INT
12141 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
12142
12143 if (val < 0)
12144 i = -1;
9854d9ed 12145 else
e2c953b6
DE
12146 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
12147 if ((val <<= 1) < 0)
12148 break;
12149
12150#if HOST_BITS_PER_WIDE_INT == 32
12151 if (GET_CODE (x) == CONST_INT && i >= 0)
12152 i += 32; /* zero-extend high-part was all 0's */
12153 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
12154 {
12155 val = CONST_DOUBLE_LOW (x);
12156
37409796
NS
12157 gcc_assert (val);
12158 if (val < 0)
e2c953b6
DE
12159 --i;
12160 else
12161 for ( ; i < 64; i++)
12162 if ((val <<= 1) < 0)
12163 break;
12164 }
12165#endif
12166
12167 fprintf (file, "%d", i + 1);
9854d9ed 12168 return;
9878760c 12169
9854d9ed
RK
12170 case 'X':
12171 if (GET_CODE (x) == MEM
6fb5fa3c
DB
12172 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
12173 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
12174 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 12175 putc ('x', file);
9854d9ed 12176 return;
9878760c 12177
9854d9ed
RK
12178 case 'Y':
12179 /* Like 'L', for third word of TImode */
12180 if (GET_CODE (x) == REG)
fb5c67a7 12181 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 12182 else if (GET_CODE (x) == MEM)
9878760c 12183 {
9854d9ed
RK
12184 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12185 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12186 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
12187 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12188 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 12189 else
d7624dc0 12190 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 12191 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12192 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12193 reg_names[SMALL_DATA_REG]);
9878760c
RK
12194 }
12195 return;
f676971a 12196
9878760c 12197 case 'z':
b4ac57ab
RS
12198 /* X is a SYMBOL_REF. Write out the name preceded by a
12199 period and without any trailing data in brackets. Used for function
4d30c363
MM
12200 names. If we are configured for System V (or the embedded ABI) on
12201 the PowerPC, do not emit the period, since those systems do not use
12202 TOCs and the like. */
37409796 12203 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 12204
c4ad648e
AM
12205 /* Mark the decl as referenced so that cgraph will output the
12206 function. */
9bf6462a 12207 if (SYMBOL_REF_DECL (x))
c4ad648e 12208 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 12209
85b776df 12210 /* For macho, check to see if we need a stub. */
f9da97f0
AP
12211 if (TARGET_MACHO)
12212 {
12213 const char *name = XSTR (x, 0);
a031e781 12214#if TARGET_MACHO
3b48085e 12215 if (MACHOPIC_INDIRECT
11abc112
MM
12216 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
12217 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
12218#endif
12219 assemble_name (file, name);
12220 }
85b776df 12221 else if (!DOT_SYMBOLS)
9739c90c 12222 assemble_name (file, XSTR (x, 0));
85b776df
AM
12223 else
12224 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
12225 return;
12226
9854d9ed
RK
12227 case 'Z':
12228 /* Like 'L', for last word of TImode. */
12229 if (GET_CODE (x) == REG)
fb5c67a7 12230 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
12231 else if (GET_CODE (x) == MEM)
12232 {
12233 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12234 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12235 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
12236 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12237 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 12238 else
d7624dc0 12239 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 12240 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12241 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12242 reg_names[SMALL_DATA_REG]);
9854d9ed 12243 }
5c23c401 12244 return;
0ac081f6 12245
a3170dc6 12246 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
12247 case 'y':
12248 {
12249 rtx tmp;
12250
37409796 12251 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
12252
12253 tmp = XEXP (x, 0);
12254
90d3ff1c 12255 /* Ugly hack because %y is overloaded. */
8ef65e3d 12256 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
12257 && (GET_MODE_SIZE (GET_MODE (x)) == 8
12258 || GET_MODE (x) == TFmode
12259 || GET_MODE (x) == TImode))
a3170dc6
AH
12260 {
12261 /* Handle [reg]. */
12262 if (GET_CODE (tmp) == REG)
12263 {
12264 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
12265 break;
12266 }
12267 /* Handle [reg+UIMM]. */
12268 else if (GET_CODE (tmp) == PLUS &&
12269 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
12270 {
12271 int x;
12272
37409796 12273 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
12274
12275 x = INTVAL (XEXP (tmp, 1));
12276 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
12277 break;
12278 }
12279
12280 /* Fall through. Must be [reg+reg]. */
12281 }
850e8d3d
DN
12282 if (TARGET_ALTIVEC
12283 && GET_CODE (tmp) == AND
12284 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
12285 && INTVAL (XEXP (tmp, 1)) == -16)
12286 tmp = XEXP (tmp, 0);
0ac081f6 12287 if (GET_CODE (tmp) == REG)
c62f2db5 12288 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 12289 else
0ac081f6 12290 {
cb8cc791
AP
 12291	    if (GET_CODE (tmp) != PLUS
12292 || !REG_P (XEXP (tmp, 0))
12293 || !REG_P (XEXP (tmp, 1)))
12294 {
12295 output_operand_lossage ("invalid %%y value, try using the 'Z' constraint");
12296 break;
12297 }
bb8df8a6 12298
0ac081f6
AH
12299 if (REGNO (XEXP (tmp, 0)) == 0)
12300 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
12301 reg_names[ REGNO (XEXP (tmp, 0)) ]);
12302 else
12303 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
12304 reg_names[ REGNO (XEXP (tmp, 1)) ]);
12305 }
0ac081f6
AH
12306 break;
12307 }
f676971a 12308
9878760c
RK
12309 case 0:
12310 if (GET_CODE (x) == REG)
12311 fprintf (file, "%s", reg_names[REGNO (x)]);
12312 else if (GET_CODE (x) == MEM)
12313 {
12314 /* We need to handle PRE_INC and PRE_DEC here, since we need to
12315 know the width from the mode. */
12316 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
12317 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
12318 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 12319 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
12320 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
12321 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
12322 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12323 output_address (XEXP (XEXP (x, 0), 1));
9878760c 12324 else
a54d04b7 12325 output_address (XEXP (x, 0));
9878760c
RK
12326 }
12327 else
a54d04b7 12328 output_addr_const (file, x);
a85d226b 12329 return;
9878760c 12330
c4501e62
JJ
12331 case '&':
12332 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12333 return;
12334
9878760c
RK
12335 default:
12336 output_operand_lossage ("invalid %%xn code");
12337 }
12338}
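An illustrative sketch (not part of rs6000.c) of the sign-extension trick used by the 'w' modifier above: masking to 16 bits, XORing with 0x8000 and subtracting 0x8000 yields the low halfword as a signed value, with no conditionals.

#include <stdio.h>

static long low16_signed (long v)
{
  /* Same expression as the 'w' case: ((v & 0xffff) ^ 0x8000) - 0x8000.  */
  return ((v & 0xffff) ^ 0x8000) - 0x8000;
}

int main (void)
{
  printf ("%ld\n", low16_signed (0x1234));    /* 4660   */
  printf ("%ld\n", low16_signed (0xffff));    /* -1     */
  printf ("%ld\n", low16_signed (0x18000));   /* -32768 */
  return 0;
}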
12339\f
12340/* Print the address of an operand. */
12341
12342void
a2369ed3 12343print_operand_address (FILE *file, rtx x)
9878760c
RK
12344{
12345 if (GET_CODE (x) == REG)
4697a36c 12346 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
12347 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12348 || GET_CODE (x) == LABEL_REF)
9878760c
RK
12349 {
12350 output_addr_const (file, x);
ba5e43aa 12351 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12352 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12353 reg_names[SMALL_DATA_REG]);
37409796
NS
12354 else
12355 gcc_assert (!TARGET_TOC);
9878760c
RK
12356 }
12357 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12358 {
9024f4b8 12359 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 12360 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
12361 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12362 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 12363 else
4697a36c
MM
12364 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12365 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
12366 }
12367 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
12368 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
12369 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
12370#if TARGET_ELF
12371 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12372 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
12373 {
12374 output_addr_const (file, XEXP (x, 1));
12375 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12376 }
c859cda6
DJ
12377#endif
12378#if TARGET_MACHO
12379 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12380 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
12381 {
12382 fprintf (file, "lo16(");
12383 output_addr_const (file, XEXP (x, 1));
12384 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12385 }
3cb999d8 12386#endif
4d588c14 12387 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 12388 {
2bfcf297 12389 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 12390 {
2bfcf297
DB
12391 rtx contains_minus = XEXP (x, 1);
12392 rtx minus, symref;
12393 const char *name;
f676971a 12394
9ebbca7d 12395 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 12396 turn it into (sym) for output_addr_const. */
9ebbca7d
GK
12397 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
12398 contains_minus = XEXP (contains_minus, 0);
12399
2bfcf297
DB
12400 minus = XEXP (contains_minus, 0);
12401 symref = XEXP (minus, 0);
0cdc04e8 12402 gcc_assert (GET_CODE (XEXP (minus, 1)) == SYMBOL_REF);
2bfcf297
DB
12403 XEXP (contains_minus, 0) = symref;
12404 if (TARGET_ELF)
12405 {
12406 char *newname;
12407
12408 name = XSTR (symref, 0);
5ead67f6 12409 newname = XALLOCAVEC (char, strlen (name) + sizeof ("@toc"));
2bfcf297
DB
12410 strcpy (newname, name);
12411 strcat (newname, "@toc");
12412 XSTR (symref, 0) = newname;
12413 }
12414 output_addr_const (file, XEXP (x, 1));
12415 if (TARGET_ELF)
12416 XSTR (symref, 0) = name;
9ebbca7d
GK
12417 XEXP (contains_minus, 0) = minus;
12418 }
12419 else
12420 output_addr_const (file, XEXP (x, 1));
12421
12422 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
12423 }
9878760c 12424 else
37409796 12425 gcc_unreachable ();
9878760c
RK
12426}
12427\f
88cad84b 12428/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
12429 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
12430 is defined. It also needs to handle DI-mode objects on 64-bit
12431 targets. */
12432
12433static bool
a2369ed3 12434rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 12435{
f4f4921e 12436#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 12437 /* Special handling for SI values. */
84dcde01 12438 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 12439 {
301d03af 12440 static int recurse = 0;
f676971a 12441
301d03af
RS
12442 /* For -mrelocatable, we mark all addresses that need to be fixed up
12443 in the .fixup section. */
12444 if (TARGET_RELOCATABLE
d6b5193b
RS
12445 && in_section != toc_section
12446 && in_section != text_section
4325ca90 12447 && !unlikely_text_section_p (in_section)
301d03af
RS
12448 && !recurse
12449 && GET_CODE (x) != CONST_INT
12450 && GET_CODE (x) != CONST_DOUBLE
12451 && CONSTANT_P (x))
12452 {
12453 char buf[256];
12454
12455 recurse = 1;
12456 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12457 fixuplabelno++;
12458 ASM_OUTPUT_LABEL (asm_out_file, buf);
12459 fprintf (asm_out_file, "\t.long\t(");
12460 output_addr_const (asm_out_file, x);
12461 fprintf (asm_out_file, ")@fixup\n");
12462 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12463 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12464 fprintf (asm_out_file, "\t.long\t");
12465 assemble_name (asm_out_file, buf);
12466 fprintf (asm_out_file, "\n\t.previous\n");
12467 recurse = 0;
12468 return true;
12469 }
12470 /* Remove initial .'s to turn a -mcall-aixdesc function
12471 address into the address of the descriptor, not the function
12472 itself. */
12473 else if (GET_CODE (x) == SYMBOL_REF
12474 && XSTR (x, 0)[0] == '.'
12475 && DEFAULT_ABI == ABI_AIX)
12476 {
12477 const char *name = XSTR (x, 0);
12478 while (*name == '.')
12479 name++;
12480
12481 fprintf (asm_out_file, "\t.long\t%s\n", name);
12482 return true;
12483 }
12484 }
f4f4921e 12485#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
12486 return default_assemble_integer (x, size, aligned_p);
12487}
93638d7a
AM
12488
12489#ifdef HAVE_GAS_HIDDEN
12490/* Emit an assembler directive to set symbol visibility for DECL to
12491 VISIBILITY_TYPE. */
12492
5add3202 12493static void
a2369ed3 12494rs6000_assemble_visibility (tree decl, int vis)
93638d7a 12495{
93638d7a
AM
12496 /* Functions need to have their entry point symbol visibility set as
12497 well as their descriptor symbol visibility. */
85b776df
AM
12498 if (DEFAULT_ABI == ABI_AIX
12499 && DOT_SYMBOLS
12500 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 12501 {
25fdb4dc 12502 static const char * const visibility_types[] = {
c4ad648e 12503 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
12504 };
12505
12506 const char *name, *type;
93638d7a
AM
12507
12508 name = ((* targetm.strip_name_encoding)
12509 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 12510 type = visibility_types[vis];
93638d7a 12511
25fdb4dc
RH
12512 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12513 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 12514 }
25fdb4dc
RH
12515 else
12516 default_assemble_visibility (decl, vis);
93638d7a
AM
12517}
12518#endif
301d03af 12519\f
39a10a29 12520enum rtx_code
a2369ed3 12521rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
12522{
12523 /* Reversal of FP compares takes care -- an ordered compare
12524 becomes an unordered compare and vice versa. */
f676971a 12525 if (mode == CCFPmode
bc9ec0e0
GK
12526 && (!flag_finite_math_only
12527 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12528 || code == UNEQ || code == LTGT))
bab6226b 12529 return reverse_condition_maybe_unordered (code);
39a10a29 12530 else
bab6226b 12531 return reverse_condition (code);
39a10a29
GK
12532}
12533
39a10a29
GK
12534/* Generate a compare for CODE. Return a brand-new rtx that
12535 represents the result of the compare. */
a4f6c312 12536
39a10a29 12537static rtx
a2369ed3 12538rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
12539{
12540 enum machine_mode comp_mode;
12541 rtx compare_result;
12542
12543 if (rs6000_compare_fp_p)
12544 comp_mode = CCFPmode;
12545 else if (code == GTU || code == LTU
c4ad648e 12546 || code == GEU || code == LEU)
39a10a29 12547 comp_mode = CCUNSmode;
60934f9c
NS
12548 else if ((code == EQ || code == NE)
12549 && GET_CODE (rs6000_compare_op0) == SUBREG
12550 && GET_CODE (rs6000_compare_op1) == SUBREG
12551 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
12552 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
12553 /* These are unsigned values, perhaps there will be a later
12554 ordering compare that can be shared with this one.
12555 Unfortunately we cannot detect the signedness of the operands
12556 for non-subregs. */
12557 comp_mode = CCUNSmode;
39a10a29
GK
12558 else
12559 comp_mode = CCmode;
12560
12561 /* First, the compare. */
12562 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12563
cef6b86c 12564 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12565 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 12566 && rs6000_compare_fp_p)
a3170dc6 12567 {
64022b5d 12568 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
12569 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
12570
12571 if (op_mode == VOIDmode)
12572 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 12573
cef6b86c
EB
12574 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12575 This explains the following mess. */
423c1189 12576
a3170dc6
AH
12577 switch (code)
12578 {
423c1189 12579 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
12580 switch (op_mode)
12581 {
12582 case SFmode:
12583 cmp = flag_unsafe_math_optimizations
12584 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
12585 rs6000_compare_op1)
12586 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
12587 rs6000_compare_op1);
12588 break;
12589
12590 case DFmode:
12591 cmp = flag_unsafe_math_optimizations
12592 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
12593 rs6000_compare_op1)
12594 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
12595 rs6000_compare_op1);
12596 break;
12597
17caeff2
JM
12598 case TFmode:
12599 cmp = flag_unsafe_math_optimizations
12600 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
12601 rs6000_compare_op1)
12602 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
12603 rs6000_compare_op1);
12604 break;
12605
37409796
NS
12606 default:
12607 gcc_unreachable ();
12608 }
a3170dc6 12609 break;
bb8df8a6 12610
423c1189 12611 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
12612 switch (op_mode)
12613 {
12614 case SFmode:
12615 cmp = flag_unsafe_math_optimizations
12616 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
12617 rs6000_compare_op1)
12618 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
12619 rs6000_compare_op1);
12620 break;
bb8df8a6 12621
37409796
NS
12622 case DFmode:
12623 cmp = flag_unsafe_math_optimizations
12624 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
12625 rs6000_compare_op1)
12626 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
12627 rs6000_compare_op1);
12628 break;
12629
17caeff2
JM
12630 case TFmode:
12631 cmp = flag_unsafe_math_optimizations
12632 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
12633 rs6000_compare_op1)
12634 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
12635 rs6000_compare_op1);
12636 break;
12637
37409796
NS
12638 default:
12639 gcc_unreachable ();
12640 }
a3170dc6 12641 break;
bb8df8a6 12642
423c1189 12643 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
12644 switch (op_mode)
12645 {
12646 case SFmode:
12647 cmp = flag_unsafe_math_optimizations
12648 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
12649 rs6000_compare_op1)
12650 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
12651 rs6000_compare_op1);
12652 break;
bb8df8a6 12653
37409796
NS
12654 case DFmode:
12655 cmp = flag_unsafe_math_optimizations
12656 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
12657 rs6000_compare_op1)
12658 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
12659 rs6000_compare_op1);
12660 break;
12661
17caeff2
JM
12662 case TFmode:
12663 cmp = flag_unsafe_math_optimizations
12664 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
12665 rs6000_compare_op1)
12666 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
12667 rs6000_compare_op1);
12668 break;
12669
37409796
NS
12670 default:
12671 gcc_unreachable ();
12672 }
a3170dc6 12673 break;
4d4cbc0e 12674 default:
37409796 12675 gcc_unreachable ();
a3170dc6
AH
12676 }
12677
12678 /* Synthesize LE and GE from LT/GT || EQ. */
12679 if (code == LE || code == GE || code == LEU || code == GEU)
12680 {
a3170dc6
AH
12681 emit_insn (cmp);
12682
12683 switch (code)
12684 {
12685 case LE: code = LT; break;
12686 case GE: code = GT; break;
12687 case LEU: code = LT; break;
12688 case GEU: code = GT; break;
37409796 12689 default: gcc_unreachable ();
a3170dc6
AH
12690 }
12691
a3170dc6
AH
12692 compare_result2 = gen_reg_rtx (CCFPmode);
12693
12694 /* Do the EQ. */
37409796
NS
12695 switch (op_mode)
12696 {
12697 case SFmode:
12698 cmp = flag_unsafe_math_optimizations
12699 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
12700 rs6000_compare_op1)
12701 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
12702 rs6000_compare_op1);
12703 break;
12704
12705 case DFmode:
12706 cmp = flag_unsafe_math_optimizations
12707 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
12708 rs6000_compare_op1)
12709 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
12710 rs6000_compare_op1);
12711 break;
12712
17caeff2
JM
12713 case TFmode:
12714 cmp = flag_unsafe_math_optimizations
12715 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
12716 rs6000_compare_op1)
12717 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
12718 rs6000_compare_op1);
12719 break;
12720
37409796
NS
12721 default:
12722 gcc_unreachable ();
12723 }
a3170dc6
AH
12724 emit_insn (cmp);
12725
a3170dc6 12726 /* OR them together. */
64022b5d
AH
12727 or_result = gen_reg_rtx (CCFPmode);
12728 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12729 compare_result2);
a3170dc6
AH
12730 compare_result = or_result;
12731 code = EQ;
12732 }
12733 else
12734 {
a3170dc6 12735 if (code == NE || code == LTGT)
a3170dc6 12736 code = NE;
423c1189
AH
12737 else
12738 code = EQ;
a3170dc6
AH
12739 }
12740
12741 emit_insn (cmp);
12742 }
12743 else
de17c25f
DE
12744 {
12745 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12746 CLOBBERs to match cmptf_internal2 pattern. */
12747 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
12748 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 12749 && !TARGET_IEEEQUAD
de17c25f
DE
12750 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
12751 emit_insn (gen_rtx_PARALLEL (VOIDmode,
12752 gen_rtvec (9,
12753 gen_rtx_SET (VOIDmode,
12754 compare_result,
12755 gen_rtx_COMPARE (comp_mode,
12756 rs6000_compare_op0,
12757 rs6000_compare_op1)),
12758 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12759 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12760 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12761 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12762 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12763 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12764 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12765 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
12766 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
12767 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
12768 {
12769 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
12770 comp_mode = CCEQmode;
12771 compare_result = gen_reg_rtx (CCEQmode);
12772 if (TARGET_64BIT)
12773 emit_insn (gen_stack_protect_testdi (compare_result,
12774 rs6000_compare_op0, op1));
12775 else
12776 emit_insn (gen_stack_protect_testsi (compare_result,
12777 rs6000_compare_op0, op1));
12778 }
de17c25f
DE
12779 else
12780 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
12781 gen_rtx_COMPARE (comp_mode,
12782 rs6000_compare_op0,
12783 rs6000_compare_op1)));
12784 }
f676971a 12785
ca5adc63 12786 /* Some kinds of FP comparisons need an OR operation;
e7108df9 12787 under flag_finite_math_only we don't bother. */
39a10a29 12788 if (rs6000_compare_fp_p
e7108df9 12789 && !flag_finite_math_only
8ef65e3d 12790 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
12791 && (code == LE || code == GE
12792 || code == UNEQ || code == LTGT
12793 || code == UNGT || code == UNLT))
12794 {
12795 enum rtx_code or1, or2;
12796 rtx or1_rtx, or2_rtx, compare2_rtx;
12797 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 12798
39a10a29
GK
12799 switch (code)
12800 {
12801 case LE: or1 = LT; or2 = EQ; break;
12802 case GE: or1 = GT; or2 = EQ; break;
12803 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
12804 case LTGT: or1 = LT; or2 = GT; break;
12805 case UNGT: or1 = UNORDERED; or2 = GT; break;
12806 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 12807 default: gcc_unreachable ();
39a10a29
GK
12808 }
12809 validate_condition_mode (or1, comp_mode);
12810 validate_condition_mode (or2, comp_mode);
1c563bed
KH
12811 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
12812 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
12813 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
12814 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
12815 const_true_rtx);
12816 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
12817
12818 compare_result = or_result;
12819 code = EQ;
12820 }
12821
12822 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 12823
1c563bed 12824 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
12825}
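A small sketch (not part of rs6000.c) of the comparison splits used above, checked on scalar doubles rather than CR bits: LE, GE, UNEQ, LTGT, UNGT and UNLT are each the OR of two conditions the hardware can test directly, where "unordered" means at least one operand is a NaN.

#include <math.h>
#include <stdio.h>

int main (void)
{
  double x = 1.0, y = NAN;
  int un = isnan (x) || isnan (y);      /* UNORDERED */

  printf ("x <= y (LT or EQ)       : %d\n", (x < y) || (x == y));
  printf ("UNGT  (UNORDERED or GT) : %d\n", un || (x > y));
  printf ("UNEQ  (UNORDERED or EQ) : %d\n", un || (x == y));
  return 0;
}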
12826
12827
12828/* Emit the RTL for an sCOND pattern. */
12829
12830void
a2369ed3 12831rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
12832{
12833 rtx condition_rtx;
12834 enum machine_mode op_mode;
b7053a3f 12835 enum rtx_code cond_code;
39a10a29
GK
12836
12837 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
12838 cond_code = GET_CODE (condition_rtx);
12839
8ef65e3d 12840 if (rs6000_compare_fp_p
423c1189
AH
12841 && !TARGET_FPRS && TARGET_HARD_FLOAT)
12842 {
12843 rtx t;
12844
12845 PUT_MODE (condition_rtx, SImode);
12846 t = XEXP (condition_rtx, 0);
12847
37409796 12848 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
12849
12850 if (cond_code == NE)
64022b5d 12851 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 12852
64022b5d 12853 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
12854 return;
12855 }
12856
b7053a3f
GK
12857 if (cond_code == NE
12858 || cond_code == GE || cond_code == LE
12859 || cond_code == GEU || cond_code == LEU
12860 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
12861 {
12862 rtx not_result = gen_reg_rtx (CCEQmode);
12863 rtx not_op, rev_cond_rtx;
12864 enum machine_mode cc_mode;
f676971a 12865
b7053a3f
GK
12866 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
12867
1c563bed 12868 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 12869 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
12870 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
12871 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
12872 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
12873 }
39a10a29
GK
12874
12875 op_mode = GET_MODE (rs6000_compare_op0);
12876 if (op_mode == VOIDmode)
12877 op_mode = GET_MODE (rs6000_compare_op1);
12878
12879 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
12880 {
12881 PUT_MODE (condition_rtx, DImode);
12882 convert_move (result, condition_rtx, 0);
12883 }
12884 else
12885 {
12886 PUT_MODE (condition_rtx, SImode);
12887 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
12888 }
12889}
12890
39a10a29
GK
12891/* Emit a branch of kind CODE to location LOC. */
12892
12893void
a2369ed3 12894rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
12895{
12896 rtx condition_rtx, loc_ref;
12897
12898 condition_rtx = rs6000_generate_compare (code);
12899 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
12900 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
12901 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
12902 loc_ref, pc_rtx)));
12903}
12904
12a4e8c5
GK
12905/* Return the string to output a conditional branch to LABEL, which is
12906 the operand number of the label, or -1 if the branch is really a
f676971a 12907 conditional return.
12a4e8c5
GK
12908
12909 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
12910 condition code register and its mode specifies what kind of
12911 comparison we made.
12912
a0ab749a 12913 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
12914
12915 INSN is the insn. */
12916
12917char *
a2369ed3 12918output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
12919{
12920 static char string[64];
12921 enum rtx_code code = GET_CODE (op);
12922 rtx cc_reg = XEXP (op, 0);
12923 enum machine_mode mode = GET_MODE (cc_reg);
12924 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 12925 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
12926 int really_reversed = reversed ^ need_longbranch;
12927 char *s = string;
12928 const char *ccode;
12929 const char *pred;
12930 rtx note;
12931
39a10a29
GK
12932 validate_condition_mode (code, mode);
12933
12934 /* Work out which way this really branches. We could use
12935 reverse_condition_maybe_unordered here always but this
12936 makes the resulting assembler clearer. */
12a4e8c5 12937 if (really_reversed)
de40e1df
DJ
12938 {
12939 /* Reversal of FP compares takes care -- an ordered compare
12940 becomes an unordered compare and vice versa. */
12941 if (mode == CCFPmode)
12942 code = reverse_condition_maybe_unordered (code);
12943 else
12944 code = reverse_condition (code);
12945 }
12a4e8c5 12946
8ef65e3d 12947 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
12948 {
12949 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
12950 to the GT bit. */
37409796
NS
12951 switch (code)
12952 {
12953 case EQ:
12954 /* Opposite of GT. */
12955 code = GT;
12956 break;
12957
12958 case NE:
12959 code = UNLE;
12960 break;
12961
12962 default:
12963 gcc_unreachable ();
12964 }
a3170dc6
AH
12965 }
12966
39a10a29 12967 switch (code)
12a4e8c5
GK
12968 {
12969 /* Not all of these are actually distinct opcodes, but
12970 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
12971 case NE: case LTGT:
12972 ccode = "ne"; break;
12973 case EQ: case UNEQ:
12974 ccode = "eq"; break;
f676971a 12975 case GE: case GEU:
50a0b056 12976 ccode = "ge"; break;
f676971a 12977 case GT: case GTU: case UNGT:
50a0b056 12978 ccode = "gt"; break;
f676971a 12979 case LE: case LEU:
50a0b056 12980 ccode = "le"; break;
f676971a 12981 case LT: case LTU: case UNLT:
50a0b056 12982 ccode = "lt"; break;
12a4e8c5
GK
12983 case UNORDERED: ccode = "un"; break;
12984 case ORDERED: ccode = "nu"; break;
12985 case UNGE: ccode = "nl"; break;
12986 case UNLE: ccode = "ng"; break;
12987 default:
37409796 12988 gcc_unreachable ();
12a4e8c5 12989 }
f676971a
EC
12990
12991 /* Maybe we have a guess as to how likely the branch is.
94a54f47 12992 The old mnemonics don't have a way to specify this information. */
f4857b9b 12993 pred = "";
12a4e8c5
GK
12994 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
12995 if (note != NULL_RTX)
12996 {
12997 /* PROB is the difference from 50%. */
12998 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
12999
13000 /* Only hint for highly probable/improbable branches on newer
13001 cpus as static prediction overrides processor dynamic
13002 prediction. For older cpus we may as well always hint, but
13003 assume not taken for branches that are very close to 50% as a
13004 mispredicted taken branch is more expensive than a
f676971a 13005 mispredicted not-taken branch. */
ec507f2d 13006 if (rs6000_always_hint
2c9e13f3
JH
13007 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
13008 && br_prob_note_reliable_p (note)))
f4857b9b
AM
13009 {
13010 if (abs (prob) > REG_BR_PROB_BASE / 20
13011 && ((prob > 0) ^ need_longbranch))
c4ad648e 13012 pred = "+";
f4857b9b
AM
13013 else
13014 pred = "-";
13015 }
12a4e8c5 13016 }
12a4e8c5
GK
13017
13018 if (label == NULL)
94a54f47 13019 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 13020 else
94a54f47 13021 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 13022
37c67319 13023 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 13024 Assume they'd only be the first character.... */
37c67319
GK
13025 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
13026 *s++ = '%';
94a54f47 13027 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
13028
13029 if (label != NULL)
13030 {
13031 /* If the branch distance was too far, we may have to use an
13032 unconditional branch to go the distance. */
13033 if (need_longbranch)
44518ddd 13034 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
13035 else
13036 s += sprintf (s, ",%s", label);
13037 }
13038
13039 return string;
13040}
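An illustrative sketch (not part of rs6000.c) of the hint selection above, rewritten over plain integers. GCC's REG_BR_PROB_BASE is 10000, so the note value is a taken-probability in units of 0.01%; always_hint and need_longbranch stand in for rs6000_always_hint and the long-branch test, and the reliability check on the note is omitted.

#include <stdio.h>
#include <stdlib.h>

#define BR_PROB_BASE 10000

static const char *branch_hint (int note_prob, int always_hint, int need_longbranch)
{
  int prob = note_prob - BR_PROB_BASE / 2;      /* difference from 50% */

  /* Hint only strongly biased branches (or everything on older cpus).  */
  if (always_hint || abs (prob) > BR_PROB_BASE / 100 * 48)
    {
      if (abs (prob) > BR_PROB_BASE / 20 && ((prob > 0) ^ need_longbranch))
        return "+";
      else
        return "-";
    }
  return "";
}

int main (void)
{
  printf ("taken 99%% -> \"%s\"\n", branch_hint (9900, 0, 0));  /* "+" */
  printf ("taken  1%% -> \"%s\"\n", branch_hint (100, 0, 0));   /* "-" */
  printf ("taken 52%% -> \"%s\"\n", branch_hint (5200, 0, 0));  /* ""  */
  return 0;
}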
50a0b056 13041
64022b5d 13042/* Return the string to flip the GT bit on a CR. */
423c1189 13043char *
64022b5d 13044output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
13045{
13046 static char string[64];
13047 int a, b;
13048
37409796
NS
13049 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
13050 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 13051
64022b5d
AH
13052 /* GT bit. */
13053 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
13054 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
13055
13056 sprintf (string, "crnot %d,%d", a, b);
13057 return string;
13058}
13059
21213b4c
DP
 13060/* Return the insn index of the vector compare instruction for the given CODE,
 13061   DEST_MODE and OP_MODE.  Return INSN_NOT_AVAILABLE if no valid insn is
 13062   available.  */
13063
13064static int
94ff898d 13065get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
13066 enum machine_mode dest_mode,
13067 enum machine_mode op_mode)
13068{
13069 if (!TARGET_ALTIVEC)
13070 return INSN_NOT_AVAILABLE;
13071
13072 switch (code)
13073 {
13074 case EQ:
13075 if (dest_mode == V16QImode && op_mode == V16QImode)
13076 return UNSPEC_VCMPEQUB;
13077 if (dest_mode == V8HImode && op_mode == V8HImode)
13078 return UNSPEC_VCMPEQUH;
13079 if (dest_mode == V4SImode && op_mode == V4SImode)
13080 return UNSPEC_VCMPEQUW;
13081 if (dest_mode == V4SImode && op_mode == V4SFmode)
13082 return UNSPEC_VCMPEQFP;
13083 break;
13084 case GE:
13085 if (dest_mode == V4SImode && op_mode == V4SFmode)
13086 return UNSPEC_VCMPGEFP;
      break;
 13087    case GT:
13088 if (dest_mode == V16QImode && op_mode == V16QImode)
13089 return UNSPEC_VCMPGTSB;
13090 if (dest_mode == V8HImode && op_mode == V8HImode)
13091 return UNSPEC_VCMPGTSH;
13092 if (dest_mode == V4SImode && op_mode == V4SImode)
13093 return UNSPEC_VCMPGTSW;
13094 if (dest_mode == V4SImode && op_mode == V4SFmode)
13095 return UNSPEC_VCMPGTFP;
13096 break;
13097 case GTU:
13098 if (dest_mode == V16QImode && op_mode == V16QImode)
13099 return UNSPEC_VCMPGTUB;
13100 if (dest_mode == V8HImode && op_mode == V8HImode)
13101 return UNSPEC_VCMPGTUH;
13102 if (dest_mode == V4SImode && op_mode == V4SImode)
13103 return UNSPEC_VCMPGTUW;
13104 break;
13105 default:
13106 break;
13107 }
13108 return INSN_NOT_AVAILABLE;
13109}
13110
13111/* Emit vector compare for operands OP0 and OP1 using code RCODE.
13112 DMODE is expected destination mode. This is a recursive function. */
13113
13114static rtx
13115rs6000_emit_vector_compare (enum rtx_code rcode,
13116 rtx op0, rtx op1,
13117 enum machine_mode dmode)
13118{
13119 int vec_cmp_insn;
13120 rtx mask;
13121 enum machine_mode dest_mode;
13122 enum machine_mode op_mode = GET_MODE (op1);
13123
37409796
NS
13124 gcc_assert (TARGET_ALTIVEC);
13125 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
13126
 13127  /* Floating point vector compare instructions use destination V4SImode.
13128 Move destination to appropriate mode later. */
13129 if (dmode == V4SFmode)
13130 dest_mode = V4SImode;
13131 else
13132 dest_mode = dmode;
13133
13134 mask = gen_reg_rtx (dest_mode);
13135 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
13136
13137 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
13138 {
13139 bool swap_operands = false;
13140 bool try_again = false;
13141 switch (rcode)
13142 {
13143 case LT:
13144 rcode = GT;
13145 swap_operands = true;
13146 try_again = true;
13147 break;
13148 case LTU:
13149 rcode = GTU;
13150 swap_operands = true;
13151 try_again = true;
13152 break;
13153 case NE:
370df7db
JC
13154 case UNLE:
13155 case UNLT:
13156 case UNGE:
13157 case UNGT:
13158 /* Invert condition and try again.
13159 e.g., A != B becomes ~(A==B). */
21213b4c 13160 {
370df7db 13161 enum rtx_code rev_code;
21213b4c 13162 enum insn_code nor_code;
d1123cde 13163 rtx eq_rtx;
370df7db
JC
13164
13165 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
13166 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
13167 dest_mode);
94ff898d 13168
166cdb08 13169 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 13170 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
13171 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
13172
13173 if (dmode != dest_mode)
13174 {
13175 rtx temp = gen_reg_rtx (dest_mode);
13176 convert_move (temp, mask, 0);
13177 return temp;
13178 }
13179 return mask;
13180 }
13181 break;
13182 case GE:
13183 case GEU:
13184 case LE:
13185 case LEU:
13186 /* Try GT/GTU/LT/LTU OR EQ */
13187 {
13188 rtx c_rtx, eq_rtx;
13189 enum insn_code ior_code;
13190 enum rtx_code new_code;
13191
37409796
NS
13192 switch (rcode)
13193 {
13194 case GE:
13195 new_code = GT;
13196 break;
13197
13198 case GEU:
13199 new_code = GTU;
13200 break;
13201
13202 case LE:
13203 new_code = LT;
13204 break;
13205
13206 case LEU:
13207 new_code = LTU;
13208 break;
13209
13210 default:
13211 gcc_unreachable ();
13212 }
21213b4c
DP
13213
13214 c_rtx = rs6000_emit_vector_compare (new_code,
13215 op0, op1, dest_mode);
13216 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
13217 dest_mode);
13218
166cdb08 13219 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 13220 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
13221 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
13222 if (dmode != dest_mode)
13223 {
13224 rtx temp = gen_reg_rtx (dest_mode);
13225 convert_move (temp, mask, 0);
13226 return temp;
13227 }
13228 return mask;
13229 }
13230 break;
13231 default:
37409796 13232 gcc_unreachable ();
21213b4c
DP
13233 }
13234
13235 if (try_again)
13236 {
13237 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
13238 /* You only get two chances. */
13239 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
13240 }
13241
13242 if (swap_operands)
13243 {
13244 rtx tmp;
13245 tmp = op0;
13246 op0 = op1;
13247 op1 = tmp;
13248 }
13249 }
13250
915167f5
GK
13251 emit_insn (gen_rtx_SET (VOIDmode, mask,
13252 gen_rtx_UNSPEC (dest_mode,
13253 gen_rtvec (2, op0, op1),
13254 vec_cmp_insn)));
21213b4c
DP
13255 if (dmode != dest_mode)
13256 {
13257 rtx temp = gen_reg_rtx (dest_mode);
13258 convert_move (temp, mask, 0);
13259 return temp;
13260 }
13261 return mask;
13262}
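A standalone sketch (not part of rs6000.c) of the rewrites used above when no direct AltiVec compare exists, checked element-wise on plain integers: LT swaps the operands and uses GT, NE inverts the EQ mask, and GE ORs the GT and EQ masks.

#include <stdio.h>

/* All-ones / all-zeros masks, like a single vector element.  */
static int vcmp_eq (int a, int b) { return a == b ? -1 : 0; }
static int vcmp_gt (int a, int b) { return a >  b ? -1 : 0; }

int main (void)
{
  int a = 3, b = 7;

  printf ("a <  b : %d\n", vcmp_gt (b, a) != 0);                    /* swap + GT */
  printf ("a != b : %d\n", ~vcmp_eq (a, b) != 0);                   /* NOT (EQ)  */
  printf ("a >= b : %d\n", (vcmp_gt (a, b) | vcmp_eq (a, b)) != 0); /* GT OR EQ  */
  return 0;
}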
13263
 13264/* Return vector select instruction for MODE.  Return INSN_NOT_AVAILABLE if no
 13265   valid insn exists for the given mode.  */
13266
13267static int
13268get_vsel_insn (enum machine_mode mode)
13269{
13270 switch (mode)
13271 {
13272 case V4SImode:
13273 return UNSPEC_VSEL4SI;
13274 break;
13275 case V4SFmode:
13276 return UNSPEC_VSEL4SF;
13277 break;
13278 case V8HImode:
13279 return UNSPEC_VSEL8HI;
13280 break;
13281 case V16QImode:
13282 return UNSPEC_VSEL16QI;
13283 break;
13284 default:
13285 return INSN_NOT_AVAILABLE;
13286 break;
13287 }
13288 return INSN_NOT_AVAILABLE;
13289}
13290
13291/* Emit vector select insn where DEST is destination using
13292 operands OP1, OP2 and MASK. */
13293
13294static void
13295rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
13296{
13297 rtx t, temp;
13298 enum machine_mode dest_mode = GET_MODE (dest);
13299 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
13300
13301 temp = gen_reg_rtx (dest_mode);
94ff898d 13302
bb8df8a6 13303 /* For each vector element, select op1 when mask is 1 otherwise
19f1ebc7 13304 select op2. */
915167f5
GK
13305 t = gen_rtx_SET (VOIDmode, temp,
13306 gen_rtx_UNSPEC (dest_mode,
13307 gen_rtvec (3, op2, op1, mask),
13308 vsel_insn_index));
21213b4c
DP
13309 emit_insn (t);
13310 emit_move_insn (dest, temp);
13311 return;
13312}
13313
94ff898d 13314/* Emit vector conditional expression.
21213b4c
DP
13315 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
13316 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
13317
13318int
13319rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
13320 rtx cond, rtx cc_op0, rtx cc_op1)
13321{
13322 enum machine_mode dest_mode = GET_MODE (dest);
13323 enum rtx_code rcode = GET_CODE (cond);
13324 rtx mask;
13325
13326 if (!TARGET_ALTIVEC)
13327 return 0;
13328
13329 /* Get the vector mask for the given relational operations. */
13330 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
13331
13332 rs6000_emit_vector_select (dest, op1, op2, mask);
13333
13334 return 1;
13335}
13336
50a0b056
GK
13337/* Emit a conditional move: move TRUE_COND to DEST if OP of the
13338 operands of the last comparison is nonzero/true, FALSE_COND if it
13339 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 13340
50a0b056 13341int
a2369ed3 13342rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
13343{
13344 enum rtx_code code = GET_CODE (op);
13345 rtx op0 = rs6000_compare_op0;
13346 rtx op1 = rs6000_compare_op1;
13347 REAL_VALUE_TYPE c1;
3148ad6d
DJ
13348 enum machine_mode compare_mode = GET_MODE (op0);
13349 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 13350 rtx temp;
add2402e 13351 bool is_against_zero;
50a0b056 13352
a3c9585f 13353 /* These modes should always match. */
a3170dc6
AH
13354 if (GET_MODE (op1) != compare_mode
13355 /* In the isel case however, we can use a compare immediate, so
13356 op1 may be a small constant. */
13357 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 13358 return 0;
178c3eff 13359 if (GET_MODE (true_cond) != result_mode)
3148ad6d 13360 return 0;
178c3eff 13361 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
13362 return 0;
13363
50a0b056 13364 /* First, work out if the hardware can do this at all, or
a3c9585f 13365 if it's too slow.... */
50a0b056 13366 if (! rs6000_compare_fp_p)
a3170dc6
AH
13367 {
13368 if (TARGET_ISEL)
13369 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13370 return 0;
13371 }
8ef65e3d 13372 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 13373 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 13374 return 0;
50a0b056 13375
add2402e 13376 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 13377
add2402e
GK
13378 /* A floating-point subtract might overflow, underflow, or produce
13379 an inexact result, thus changing the floating-point flags, so it
13380 can't be generated if we care about that. It's safe if one side
13381 of the construct is zero, since then no subtract will be
13382 generated. */
ebb109ad 13383 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
13384 && flag_trapping_math && ! is_against_zero)
13385 return 0;
13386
50a0b056
GK
13387   /* Eliminate half of the comparisons by switching operands; this
13388      makes the remaining code simpler.  */
13389 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 13390 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
13391 {
13392 code = reverse_condition_maybe_unordered (code);
13393 temp = true_cond;
13394 true_cond = false_cond;
13395 false_cond = temp;
13396 }
13397
13398   /* UNEQ and LTGT take four instructions for a comparison with zero,
13399      so it'll probably be faster to use a branch here too.  */
bc9ec0e0 13400 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 13401 return 0;
f676971a 13402
50a0b056
GK
13403 if (GET_CODE (op1) == CONST_DOUBLE)
13404 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 13405
b6d08ca1 13406 /* We're going to try to implement comparisons by performing
50a0b056
GK
13407 a subtract, then comparing against zero. Unfortunately,
13408 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 13409 know that the operand is finite and the comparison
50a0b056 13410      would treat EQ differently from UNORDERED, we can't do it.  */
bc9ec0e0 13411 if (HONOR_INFINITIES (compare_mode)
50a0b056 13412 && code != GT && code != UNGE
045572c7 13413 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
13414 /* Constructs of the form (a OP b ? a : b) are safe. */
13415 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 13416 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
13417 && ! rtx_equal_p (op1, true_cond))))
13418 return 0;
add2402e 13419
50a0b056
GK
13420 /* At this point we know we can use fsel. */
13421
13422 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
13423 if (! is_against_zero)
13424 {
13425 temp = gen_reg_rtx (compare_mode);
13426 emit_insn (gen_rtx_SET (VOIDmode, temp,
13427 gen_rtx_MINUS (compare_mode, op0, op1)));
13428 op0 = temp;
13429 op1 = CONST0_RTX (compare_mode);
13430 }
50a0b056
GK
13431
13432 /* If we don't care about NaNs we can reduce some of the comparisons
13433 down to faster ones. */
bc9ec0e0 13434 if (! HONOR_NANS (compare_mode))
50a0b056
GK
13435 switch (code)
13436 {
13437 case GT:
13438 code = LE;
13439 temp = true_cond;
13440 true_cond = false_cond;
13441 false_cond = temp;
13442 break;
13443 case UNGE:
13444 code = GE;
13445 break;
13446 case UNEQ:
13447 code = EQ;
13448 break;
13449 default:
13450 break;
13451 }
13452
13453 /* Now, reduce everything down to a GE. */
13454 switch (code)
13455 {
13456 case GE:
13457 break;
13458
13459 case LE:
3148ad6d
DJ
13460 temp = gen_reg_rtx (compare_mode);
13461 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13462 op0 = temp;
13463 break;
13464
13465 case ORDERED:
3148ad6d
DJ
13466 temp = gen_reg_rtx (compare_mode);
13467 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
13468 op0 = temp;
13469 break;
13470
13471 case EQ:
3148ad6d 13472 temp = gen_reg_rtx (compare_mode);
f676971a 13473 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
13474 gen_rtx_NEG (compare_mode,
13475 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
13476 op0 = temp;
13477 break;
13478
13479 case UNGE:
bc9ec0e0 13480 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 13481 temp = gen_reg_rtx (result_mode);
50a0b056 13482 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 13483 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13484 gen_rtx_GE (VOIDmode,
13485 op0, op1),
13486 true_cond, false_cond)));
bc9ec0e0
GK
13487 false_cond = true_cond;
13488 true_cond = temp;
50a0b056 13489
3148ad6d
DJ
13490 temp = gen_reg_rtx (compare_mode);
13491 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13492 op0 = temp;
13493 break;
13494
13495 case GT:
bc9ec0e0 13496 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 13497 temp = gen_reg_rtx (result_mode);
50a0b056 13498 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 13499 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13500 gen_rtx_GE (VOIDmode,
13501 op0, op1),
13502 true_cond, false_cond)));
bc9ec0e0
GK
13503 true_cond = false_cond;
13504 false_cond = temp;
50a0b056 13505
3148ad6d
DJ
13506 temp = gen_reg_rtx (compare_mode);
13507 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13508 op0 = temp;
13509 break;
13510
13511 default:
37409796 13512 gcc_unreachable ();
50a0b056
GK
13513 }
13514
13515 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 13516 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13517 gen_rtx_GE (VOIDmode,
13518 op0, op1),
13519 true_cond, false_cond)));
13520 return 1;
13521}
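/* Worked example (added for exposition; not part of the original
   rs6000.c).  Ignoring the NaN/infinity and trapping-math checks above,
   a floating-point move such as

       dest = (a <= b) ? x : y;

   is reduced to a comparison against zero by forming a - b; the LE case
   then negates that operand, leaving

       dest = ((b - a) >= 0) ? x : y;

   which matches the fsel instruction (fD = fA >= 0 ? fC : fB).  */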
13522
a3170dc6
AH
13523/* Same as above, but for ints (isel). */
13524
13525static int
a2369ed3 13526rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
13527{
13528 rtx condition_rtx, cr;
13529
13530   /* All isel implementations thus far are 32-bit.  */
13531 if (GET_MODE (rs6000_compare_op0) != SImode)
13532 return 0;
13533
13534 /* We still have to do the compare, because isel doesn't do a
13535      compare; it just looks at the CRx bits set by a previous compare
13536 instruction. */
13537 condition_rtx = rs6000_generate_compare (GET_CODE (op));
13538 cr = XEXP (condition_rtx, 0);
13539
13540 if (GET_MODE (cr) == CCmode)
13541 emit_insn (gen_isel_signed (dest, condition_rtx,
13542 true_cond, false_cond, cr));
13543 else
13544 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13545 true_cond, false_cond, cr));
13546
13547 return 1;
13548}
13549
13550const char *
a2369ed3 13551output_isel (rtx *operands)
a3170dc6
AH
13552{
13553 enum rtx_code code;
13554
13555 code = GET_CODE (operands[1]);
13556 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13557 {
13558 PUT_CODE (operands[1], reverse_condition (code));
13559 return "isel %0,%3,%2,%j1";
13560 }
13561 else
13562 return "isel %0,%2,%3,%j1";
13563}
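/* Note added for exposition (not part of the original rs6000.c): isel
   selects on a single CR bit, so only conditions with a dedicated bit
   (LT, GT, EQ and their unsigned counterparts) can be tested directly.
   For GE, GEU, LE, LEU and NE, output_isel therefore reverses the
   condition and swaps operands 2 and 3; e.g. a (ge) test is emitted as
   an isel on (lt) with the two source operands exchanged, which yields
   the same result.  */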
13564
50a0b056 13565void
a2369ed3 13566rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13567{
13568 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13569 enum rtx_code c;
50a0b056 13570 rtx target;
5dc8d536
AH
13571
13572 if (code == SMAX || code == SMIN)
13573 c = GE;
13574 else
13575 c = GEU;
13576
50a0b056 13577 if (code == SMAX || code == UMAX)
f676971a 13578 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13579 op0, op1, mode, 0);
13580 else
f676971a 13581 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13582 op1, op0, mode, 0);
37409796 13583 gcc_assert (target);
50a0b056
GK
13584 if (target != dest)
13585 emit_move_insn (dest, target);
13586}
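/* Note added for exposition (not part of the original rs6000.c): the
   expansion above is simply a conditional move, e.g.

       smax:  dest = (op0 >= op1) ? op0 : op1;
       umin:  dest = (op0 >= op1, unsigned) ? op1 : op0;

   emit_conditional_move then typically uses whatever conditional-move
   support the target provides (fsel or isel via the routines above).  */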
46c07df8 13587
915167f5
GK
13588/* Emit instructions to perform a load-reserved/store-conditional operation.
13589 The operation performed is an atomic
13590 (set M (CODE:MODE M OP))
13591 If not NULL, BEFORE is atomically set to M before the operation, and
13592 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13593 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13594 Either OP or M may be wrapped in a NOT operation. */
13595
13596void
13597rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13598 rtx m, rtx op, rtx before_param, rtx after_param,
13599 bool sync_p)
13600{
13601 enum machine_mode used_mode;
13602 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13603 rtx used_m;
13604 rtvec vec;
13605 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13606 rtx shift = NULL_RTX;
bb8df8a6 13607
915167f5
GK
13608 if (sync_p)
13609 emit_insn (gen_memory_barrier ());
bb8df8a6 13610
915167f5
GK
13611 if (GET_CODE (m) == NOT)
13612 used_m = XEXP (m, 0);
13613 else
13614 used_m = m;
13615
13616 /* If this is smaller than SImode, we'll have to use SImode with
13617 adjustments. */
13618 if (mode == QImode || mode == HImode)
13619 {
13620 rtx newop, oldop;
13621
13622 if (MEM_ALIGN (used_m) >= 32)
13623 {
13624 int ishift = 0;
13625 if (BYTES_BIG_ENDIAN)
13626 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13627
915167f5 13628 shift = GEN_INT (ishift);
c75c6d11 13629 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13630 }
13631 else
13632 {
13633 rtx addrSI, aligned_addr;
a9c9d3fa 13634 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13635
c75c6d11
JJ
13636 addrSI = gen_lowpart_common (SImode,
13637 force_reg (Pmode, XEXP (used_m, 0)));
13638 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13639 shift = gen_reg_rtx (SImode);
13640
13641 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13642 GEN_INT (shift_mask)));
13643 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
13644
13645 aligned_addr = expand_binop (Pmode, and_optab,
13646 XEXP (used_m, 0),
13647 GEN_INT (-4), NULL_RTX,
13648 1, OPTAB_LIB_WIDEN);
13649 used_m = change_address (used_m, SImode, aligned_addr);
13650 set_mem_align (used_m, 32);
915167f5 13651 }
c75c6d11
JJ
13652 /* It's safe to keep the old alias set of USED_M, because
13653 the operation is atomic and only affects the original
13654 USED_M. */
13655 if (GET_CODE (m) == NOT)
13656 m = gen_rtx_NOT (SImode, used_m);
13657 else
13658 m = used_m;
915167f5
GK
13659
13660 if (GET_CODE (op) == NOT)
13661 {
13662 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13663 oldop = gen_rtx_NOT (SImode, oldop);
13664 }
13665 else
13666 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13667
915167f5
GK
13668 switch (code)
13669 {
13670 case IOR:
13671 case XOR:
13672 newop = expand_binop (SImode, and_optab,
13673 oldop, GEN_INT (imask), NULL_RTX,
13674 1, OPTAB_LIB_WIDEN);
13675 emit_insn (gen_ashlsi3 (newop, newop, shift));
13676 break;
13677
13678 case AND:
13679 newop = expand_binop (SImode, ior_optab,
13680 oldop, GEN_INT (~imask), NULL_RTX,
13681 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13682 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13683 break;
13684
13685 case PLUS:
9f0076e5 13686 case MINUS:
915167f5
GK
13687 {
13688 rtx mask;
bb8df8a6 13689
915167f5
GK
13690 newop = expand_binop (SImode, and_optab,
13691 oldop, GEN_INT (imask), NULL_RTX,
13692 1, OPTAB_LIB_WIDEN);
13693 emit_insn (gen_ashlsi3 (newop, newop, shift));
13694
13695 mask = gen_reg_rtx (SImode);
13696 emit_move_insn (mask, GEN_INT (imask));
13697 emit_insn (gen_ashlsi3 (mask, mask, shift));
13698
9f0076e5
DE
13699 if (code == PLUS)
13700 newop = gen_rtx_PLUS (SImode, m, newop);
13701 else
13702 newop = gen_rtx_MINUS (SImode, m, newop);
13703 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13704 newop = gen_rtx_IOR (SImode, newop,
13705 gen_rtx_AND (SImode,
13706 gen_rtx_NOT (SImode, mask),
13707 m));
13708 break;
13709 }
13710
13711 default:
13712 gcc_unreachable ();
13713 }
13714
a9c9d3fa
GK
13715 if (GET_CODE (m) == NOT)
13716 {
13717 rtx mask, xorm;
13718
13719 mask = gen_reg_rtx (SImode);
13720 emit_move_insn (mask, GEN_INT (imask));
13721 emit_insn (gen_ashlsi3 (mask, mask, shift));
13722
13723 xorm = gen_rtx_XOR (SImode, used_m, mask);
13724	  /* Depending on the value of 'op', the XOR or the operation might
13725	     be simplified away.  */
13726 newop = simplify_gen_binary (code, SImode, xorm, newop);
13727 }
915167f5
GK
13728 op = newop;
13729 used_mode = SImode;
13730 before = gen_reg_rtx (used_mode);
13731 after = gen_reg_rtx (used_mode);
13732 }
13733 else
13734 {
13735 used_mode = mode;
13736 before = before_param;
13737 after = after_param;
13738
13739 if (before == NULL_RTX)
13740 before = gen_reg_rtx (used_mode);
13741 if (after == NULL_RTX)
13742 after = gen_reg_rtx (used_mode);
13743 }
bb8df8a6 13744
9f0076e5
DE
13745 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
13746 && used_mode != mode)
915167f5
GK
13747 the_op = op; /* Computed above. */
13748 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13749 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
13750 else
13751 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13752
13753 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13754 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13755 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
13756 gen_rtx_UNSPEC (used_mode,
13757 gen_rtvec (1, the_op),
13758 UNSPEC_SYNC_OP));
915167f5
GK
13759 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
13760
9f0076e5 13761 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
13762 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
13763 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
13764 else
13765 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
13766 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
13767
13768 /* Shift and mask the return values properly. */
13769 if (used_mode != mode && before_param)
13770 {
13771 emit_insn (gen_lshrsi3 (before, before, shift));
13772 convert_move (before_param, before, 1);
13773 }
13774
13775 if (used_mode != mode && after_param)
13776 {
13777 emit_insn (gen_lshrsi3 (after, after, shift));
13778 convert_move (after_param, after, 1);
13779 }
13780
13781 /* The previous sequence will end with a branch that's dependent on
13782 the conditional store, so placing an isync will ensure that no
13783      other instructions (especially load or store instructions)
13784 can start before the atomic operation completes. */
13785 if (sync_p)
13786 emit_insn (gen_isync ());
13787}
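/* Worked example (added for exposition; not part of the original
   rs6000.c): sub-word operands are widened to an aligned SImode word.
   On a big-endian target a word-aligned QImode location gets
   shift = 32 - 8 = 24 and imask = 0xff; for an IOR or XOR, for example,
   the operand is masked with imask and shifted into bits 24..31, the
   atomic operation is performed on the whole SImode word, and the
   BEFORE/AFTER results are shifted right by 24 and converted back to
   QImode.  */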
13788
b52110d4
DE
13789/* A subroutine of the atomic operation splitters. Jump to LABEL if
13790 COND is true. Mark the jump as unlikely to be taken. */
13791
13792static void
13793emit_unlikely_jump (rtx cond, rtx label)
13794{
13795 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
13796 rtx x;
13797
13798 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
13799 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
13800 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
13801}
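/* Note added for exposition (not part of the original rs6000.c): with
   REG_BR_PROB_BASE equal to 10000, the probability attached above is
   10000 / 100 - 1 = 99, i.e. the jump is annotated as taken roughly 1%
   of the time.  */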
13802
13803/* A subroutine of the atomic operation splitters. Emit a load-locked
13804 instruction in MODE. */
13805
13806static void
13807emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
13808{
13809 rtx (*fn) (rtx, rtx) = NULL;
13810 if (mode == SImode)
13811 fn = gen_load_locked_si;
13812 else if (mode == DImode)
13813 fn = gen_load_locked_di;
13814 emit_insn (fn (reg, mem));
13815}
13816
13817/* A subroutine of the atomic operation splitters. Emit a store-conditional
13818 instruction in MODE. */
13819
13820static void
13821emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
13822{
13823 rtx (*fn) (rtx, rtx, rtx) = NULL;
13824 if (mode == SImode)
13825 fn = gen_store_conditional_si;
13826 else if (mode == DImode)
13827 fn = gen_store_conditional_di;
13828
9f0076e5 13829 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
13830 if (PPC405_ERRATUM77)
13831 emit_insn (gen_memory_barrier ());
13832
13833 emit_insn (fn (res, mem, val));
13834}
13835
ea2c620c 13836/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 13837 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
13838 operand of the binary operator. BEFORE and AFTER are optional locations to
13839   return the value of MEM either before or after the operation.  SCRATCH is
13840 a scratch register. */
13841
13842void
13843rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
13844 rtx before, rtx after, rtx scratch)
13845{
13846 enum machine_mode mode = GET_MODE (mem);
13847 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13848
13849 emit_insn (gen_memory_barrier ());
13850
13851 label = gen_label_rtx ();
13852 emit_label (label);
13853 label = gen_rtx_LABEL_REF (VOIDmode, label);
13854
13855 if (before == NULL_RTX)
13856 before = scratch;
13857 emit_load_locked (mode, before, mem);
13858
13859 if (code == NOT)
13860 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
13861 else if (code == AND)
13862 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
13863 else
13864 x = gen_rtx_fmt_ee (code, mode, before, val);
13865
13866 if (after != NULL_RTX)
13867 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
13868 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
13869
13870 emit_store_conditional (mode, cond, mem, scratch);
13871
13872 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13873 emit_unlikely_jump (x, label);
13874
13875 emit_insn (gen_isync ());
13876}
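/* Illustrative sketch (added for exposition; not part of the original
   rs6000.c).  Assuming the standard PowerPC mnemonics, an SImode
   fetch-and-add split by the routine above looks roughly like

       sync
    1: lwarx   rT, 0, rMEM        # load-reserved; BEFORE value
       add     rS, rT, rVAL       # the requested operation; AFTER value
       stwcx.  rS, 0, rMEM        # store-conditional
       bne-    1b                 # retry if the reservation was lost
       isync

   with ldarx/stdcx. used instead for DImode.  */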
13877
b52110d4
DE
13878/* Expand an atomic compare and swap operation. MEM is the memory on which
13879 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
13880 value to be stored. SCRATCH is a scratch GPR. */
13881
13882void
13883rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
13884 rtx scratch)
13885{
13886 enum machine_mode mode = GET_MODE (mem);
13887 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13888
13889 emit_insn (gen_memory_barrier ());
13890
13891 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13892 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13893 emit_label (XEXP (label1, 0));
13894
13895 emit_load_locked (mode, retval, mem);
13896
13897 x = gen_rtx_COMPARE (CCmode, retval, oldval);
13898 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13899
13900 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13901 emit_unlikely_jump (x, label2);
13902
13903 emit_move_insn (scratch, newval);
13904 emit_store_conditional (mode, cond, mem, scratch);
13905
13906 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13907 emit_unlikely_jump (x, label1);
13908
13909 emit_insn (gen_isync ());
13910 emit_label (XEXP (label2, 0));
13911}
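/* Illustrative sketch (added for exposition; not part of the original
   rs6000.c).  The compare-and-swap splitter above expands roughly to

       sync
    1: lwarx   rRET, 0, rMEM
       cmpw    rRET, rOLD
       bne-    2f                 # mismatch: fail without storing
       mr      rSCRATCH, rNEW
       stwcx.  rSCRATCH, 0, rMEM
       bne-    1b                 # reservation lost: retry
       isync
    2:

   Note that the failure path branches past the isync as well.  */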
13912
13913/* Expand an atomic test and set operation. MEM is the memory on which
13914 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
13915
13916void
13917rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
13918{
13919 enum machine_mode mode = GET_MODE (mem);
13920 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13921
13922 emit_insn (gen_memory_barrier ());
13923
13924 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13925 emit_label (XEXP (label, 0));
13926
13927 emit_load_locked (mode, retval, mem);
13928 emit_move_insn (scratch, val);
13929 emit_store_conditional (mode, cond, mem, scratch);
13930
13931 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13932 emit_unlikely_jump (x, label);
13933
13934 emit_insn (gen_isync ());
13935}
13936
9fc75b97
DE
13937void
13938rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
13939{
13940 enum machine_mode mode = GET_MODE (mem);
13941 rtx addrSI, align, wdst, shift, mask;
13942 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
13943 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13944
13945 /* Shift amount for subword relative to aligned word. */
13946 addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
13947 shift = gen_reg_rtx (SImode);
13948 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
13949 GEN_INT (shift_mask)));
13950 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
13951
13952 /* Shift and mask old value into position within word. */
13953 oldval = convert_modes (SImode, mode, oldval, 1);
13954 oldval = expand_binop (SImode, and_optab,
13955 oldval, GEN_INT (imask), NULL_RTX,
13956 1, OPTAB_LIB_WIDEN);
13957 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
13958
13959 /* Shift and mask new value into position within word. */
13960 newval = convert_modes (SImode, mode, newval, 1);
13961 newval = expand_binop (SImode, and_optab,
13962 newval, GEN_INT (imask), NULL_RTX,
13963 1, OPTAB_LIB_WIDEN);
13964 emit_insn (gen_ashlsi3 (newval, newval, shift));
13965
13966 /* Mask for insertion. */
13967 mask = gen_reg_rtx (SImode);
13968 emit_move_insn (mask, GEN_INT (imask));
13969 emit_insn (gen_ashlsi3 (mask, mask, shift));
13970
13971 /* Address of aligned word containing subword. */
13972 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
13973 NULL_RTX, 1, OPTAB_LIB_WIDEN);
13974 mem = change_address (mem, SImode, align);
13975 set_mem_align (mem, 32);
13976 MEM_VOLATILE_P (mem) = 1;
13977
13978 wdst = gen_reg_rtx (SImode);
13979 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
13980 oldval, newval, mem));
13981
2725b75c
JJ
13982 /* Shift the result back. */
13983 emit_insn (gen_lshrsi3 (wdst, wdst, shift));
13984
9fc75b97
DE
13985 emit_move_insn (dst, gen_lowpart (mode, wdst));
13986}
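/* Worked example (added for exposition; not part of the original
   rs6000.c): the shift computed above is the sub-word's bit offset
   within its aligned word.  On a big-endian target an HImode operand at
   byte offset 0 of the word gets shift = 16 (it occupies the high
   halfword) while one at byte offset 2 gets shift = 0; imask is 0xffff
   for HImode and 0xff for QImode, and OLDVAL/NEWVAL are masked and
   shifted into place before the word-sized compare-and-swap runs.  */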
13987
13988void
13989rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
13990 rtx oldval, rtx newval, rtx mem,
13991 rtx scratch)
13992{
13993 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13994
13995 emit_insn (gen_memory_barrier ());
13996 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13997 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13998 emit_label (XEXP (label1, 0));
13999
14000 emit_load_locked (SImode, scratch, mem);
14001
14002 /* Mask subword within loaded value for comparison with oldval.
14003      Use UNSPEC_AND to avoid clobber.  */
14004 emit_insn (gen_rtx_SET (SImode, dest,
14005 gen_rtx_UNSPEC (SImode,
14006 gen_rtvec (2, scratch, mask),
14007 UNSPEC_AND)));
14008
14009 x = gen_rtx_COMPARE (CCmode, dest, oldval);
14010 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14011
14012 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14013 emit_unlikely_jump (x, label2);
14014
14015 /* Clear subword within loaded value for insertion of new value. */
14016 emit_insn (gen_rtx_SET (SImode, scratch,
14017 gen_rtx_AND (SImode,
14018 gen_rtx_NOT (SImode, mask), scratch)));
14019 emit_insn (gen_iorsi3 (scratch, scratch, newval));
14020 emit_store_conditional (SImode, cond, mem, scratch);
14021
14022 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14023 emit_unlikely_jump (x, label1);
14024
14025 emit_insn (gen_isync ());
14026 emit_label (XEXP (label2, 0));
14027}
14028
14029
b52110d4 14030 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
14031 multi-register moves. It will emit at most one instruction for
14032 each register that is accessed; that is, it won't emit li/lis pairs
14033 (or equivalent for 64-bit code). One of SRC or DST must be a hard
14034 register. */
46c07df8 14035
46c07df8 14036void
a9baceb1 14037rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 14038{
a9baceb1
GK
14039 /* The register number of the first register being moved. */
14040 int reg;
14041 /* The mode that is to be moved. */
14042 enum machine_mode mode;
14043 /* The mode that the move is being done in, and its size. */
14044 enum machine_mode reg_mode;
14045 int reg_mode_size;
14046 /* The number of registers that will be moved. */
14047 int nregs;
14048
14049 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
14050 mode = GET_MODE (dst);
c8b622ff 14051 nregs = hard_regno_nregs[reg][mode];
a9baceb1 14052 if (FP_REGNO_P (reg))
7393f7f8 14053 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
14054 else if (ALTIVEC_REGNO_P (reg))
14055 reg_mode = V16QImode;
4f011e1e
JM
14056 else if (TARGET_E500_DOUBLE && mode == TFmode)
14057 reg_mode = DFmode;
a9baceb1
GK
14058 else
14059 reg_mode = word_mode;
14060 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 14061
37409796 14062 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 14063
a9baceb1
GK
14064 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
14065 {
14066 /* Move register range backwards, if we might have destructive
14067 overlap. */
14068 int i;
14069 for (i = nregs - 1; i >= 0; i--)
f676971a 14070 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
14071 simplify_gen_subreg (reg_mode, dst, mode,
14072 i * reg_mode_size),
14073 simplify_gen_subreg (reg_mode, src, mode,
14074 i * reg_mode_size)));
14075 }
46c07df8
HP
14076 else
14077 {
a9baceb1
GK
14078 int i;
14079 int j = -1;
14080 bool used_update = false;
46c07df8 14081
c1e55850 14082 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
14083 {
14084 rtx breg;
3a1f863f 14085
a9baceb1
GK
14086 if (GET_CODE (XEXP (src, 0)) == PRE_INC
14087 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
14088 {
14089 rtx delta_rtx;
a9baceb1 14090 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
14091 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
14092 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
14093 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
14094 emit_insn (TARGET_32BIT
14095 ? gen_addsi3 (breg, breg, delta_rtx)
14096 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14097 src = replace_equiv_address (src, breg);
3a1f863f 14098 }
d04b6e6e 14099 else if (! rs6000_offsettable_memref_p (src))
c1e55850 14100 {
13e2e16e 14101 rtx basereg;
c1e55850
GK
14102 basereg = gen_rtx_REG (Pmode, reg);
14103 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 14104 src = replace_equiv_address (src, basereg);
c1e55850 14105 }
3a1f863f 14106
0423421f
AM
14107 breg = XEXP (src, 0);
14108 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
14109 breg = XEXP (breg, 0);
14110
14111 /* If the base register we are using to address memory is
14112 also a destination reg, then change that register last. */
14113 if (REG_P (breg)
14114 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
14115 && REGNO (breg) < REGNO (dst) + nregs)
14116 j = REGNO (breg) - REGNO (dst);
c4ad648e 14117 }
46c07df8 14118
a9baceb1 14119 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
14120 {
14121 rtx breg;
14122
a9baceb1
GK
14123 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
14124 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
14125 {
14126 rtx delta_rtx;
a9baceb1 14127 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
14128 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
14129 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
14130 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
14131
14132 /* We have to update the breg before doing the store.
14133 Use store with update, if available. */
14134
14135 if (TARGET_UPDATE)
14136 {
a9baceb1 14137 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
14138 emit_insn (TARGET_32BIT
14139 ? (TARGET_POWERPC64
14140 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
14141 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
14142 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 14143 used_update = true;
3a1f863f
DE
14144 }
14145 else
a9baceb1
GK
14146 emit_insn (TARGET_32BIT
14147 ? gen_addsi3 (breg, breg, delta_rtx)
14148 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14149 dst = replace_equiv_address (dst, breg);
3a1f863f 14150 }
37409796 14151 else
d04b6e6e 14152 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
14153 }
14154
46c07df8 14155 for (i = 0; i < nregs; i++)
f676971a 14156 {
3a1f863f
DE
14157 /* Calculate index to next subword. */
14158 ++j;
f676971a 14159 if (j == nregs)
3a1f863f 14160 j = 0;
46c07df8 14161
112cdef5 14162 /* If compiler already emitted move of first word by
a9baceb1 14163 store with update, no need to do anything. */
3a1f863f 14164 if (j == 0 && used_update)
a9baceb1 14165 continue;
f676971a 14166
a9baceb1
GK
14167 emit_insn (gen_rtx_SET (VOIDmode,
14168 simplify_gen_subreg (reg_mode, dst, mode,
14169 j * reg_mode_size),
14170 simplify_gen_subreg (reg_mode, src, mode,
14171 j * reg_mode_size)));
3a1f863f 14172 }
46c07df8
HP
14173 }
14174}
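/* Worked example (added for exposition; not part of the original
   rs6000.c): moving a DImode value between general registers on a
   32-bit target gives reg_mode = SImode, reg_mode_size = 4 and
   nregs = 2, so two SImode subreg moves are emitted.  When the ranges
   overlap with REGNO (src) < REGNO (dst), the moves are emitted
   highest-numbered register first so no source word is clobbered
   before it is read.  */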
14175
12a4e8c5 14176\f
a4f6c312
SS
14177/* This page contains routines that are used to determine what the
14178 function prologue and epilogue code will do and write them out. */
9878760c 14179
a4f6c312
SS
14180/* Return the first fixed-point register that is required to be
14181 saved. 32 if none. */
9878760c
RK
14182
14183int
863d938c 14184first_reg_to_save (void)
9878760c
RK
14185{
14186 int first_reg;
14187
14188 /* Find lowest numbered live register. */
14189 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 14190 if (df_regs_ever_live_p (first_reg)
a38d360d 14191 && (! call_used_regs[first_reg]
1db02437 14192 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14193 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
14194 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
14195 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
14196 break;
14197
ee890fe2 14198#if TARGET_MACHO
93638d7a 14199 if (flag_pic
e3b5732b 14200 && crtl->uses_pic_offset_table
93638d7a 14201 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 14202 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
14203#endif
14204
9878760c
RK
14205 return first_reg;
14206}
14207
14208/* Similar, for FP regs. */
14209
14210int
863d938c 14211first_fp_reg_to_save (void)
9878760c
RK
14212{
14213 int first_reg;
14214
14215 /* Find lowest numbered live register. */
14216 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 14217 if (df_regs_ever_live_p (first_reg))
9878760c
RK
14218 break;
14219
14220 return first_reg;
14221}
00b960c7
AH
14222
14223/* Similar, for AltiVec regs. */
14224
14225static int
863d938c 14226first_altivec_reg_to_save (void)
00b960c7
AH
14227{
14228 int i;
14229
14230 /* Stack frame remains as is unless we are in AltiVec ABI. */
14231 if (! TARGET_ALTIVEC_ABI)
14232 return LAST_ALTIVEC_REGNO + 1;
14233
22fa69da 14234 /* On Darwin, the unwind routines are compiled without
982afe02 14235 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14236 altivec registers when necessary. */
e3b5732b 14237 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14238 && ! TARGET_ALTIVEC)
14239 return FIRST_ALTIVEC_REGNO + 20;
14240
00b960c7
AH
14241 /* Find lowest numbered live register. */
14242 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14243 if (df_regs_ever_live_p (i))
00b960c7
AH
14244 break;
14245
14246 return i;
14247}
14248
14249/* Return a 32-bit mask of the AltiVec registers we need to set in
14250   VRSAVE.  Bit n of the return value is 1 if Vn is live; bit 0 is
14251   the MSB of the 32-bit word.  */
14252
14253static unsigned int
863d938c 14254compute_vrsave_mask (void)
00b960c7
AH
14255{
14256 unsigned int i, mask = 0;
14257
22fa69da 14258 /* On Darwin, the unwind routines are compiled without
982afe02 14259 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14260 call-saved altivec registers when necessary. */
e3b5732b 14261 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14262 && ! TARGET_ALTIVEC)
14263 mask |= 0xFFF;
14264
00b960c7
AH
14265 /* First, find out if we use _any_ altivec registers. */
14266 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14267 if (df_regs_ever_live_p (i))
00b960c7
AH
14268 mask |= ALTIVEC_REG_BIT (i);
14269
14270 if (mask == 0)
14271 return mask;
14272
00b960c7
AH
14273 /* Next, remove the argument registers from the set. These must
14274 be in the VRSAVE mask set by the caller, so we don't need to add
14275 them in again. More importantly, the mask we compute here is
14276 used to generate CLOBBERs in the set_vrsave insn, and we do not
14277 wish the argument registers to die. */
38173d38 14278 for (i = crtl->args.info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
14279 mask &= ~ALTIVEC_REG_BIT (i);
14280
14281 /* Similarly, remove the return value from the set. */
14282 {
14283 bool yes = false;
14284 diddle_return_value (is_altivec_return_reg, &yes);
14285 if (yes)
14286 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
14287 }
14288
14289 return mask;
14290}
14291
d62294f5 14292/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
14293 size of prologues/epilogues by calling our own save/restore-the-world
14294 routines. */
d62294f5
FJ
14295
14296static void
f57fe068
AM
14297compute_save_world_info (rs6000_stack_t *info_ptr)
14298{
14299 info_ptr->world_save_p = 1;
14300 info_ptr->world_save_p
14301 = (WORLD_SAVE_P (info_ptr)
14302 && DEFAULT_ABI == ABI_DARWIN
e3b5732b 14303 && ! (cfun->calls_setjmp && flag_exceptions)
f57fe068
AM
14304 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
14305 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
14306 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
14307 && info_ptr->cr_save_p);
f676971a 14308
d62294f5
FJ
14309 /* This will not work in conjunction with sibcalls. Make sure there
14310 are none. (This check is expensive, but seldom executed.) */
f57fe068 14311 if (WORLD_SAVE_P (info_ptr))
f676971a 14312 {
d62294f5
FJ
14313 rtx insn;
14314 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
14315 if ( GET_CODE (insn) == CALL_INSN
14316 && SIBLING_CALL_P (insn))
14317 {
14318 info_ptr->world_save_p = 0;
14319 break;
14320 }
d62294f5 14321 }
f676971a 14322
f57fe068 14323 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
14324 {
14325 /* Even if we're not touching VRsave, make sure there's room on the
14326 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 14327 will attempt to save it. */
d62294f5
FJ
14328 info_ptr->vrsave_size = 4;
14329
298ac1dd
AP
14330 /* If we are going to save the world, we need to save the link register too. */
14331 info_ptr->lr_save_p = 1;
14332
d62294f5
FJ
14333 /* "Save" the VRsave register too if we're saving the world. */
14334 if (info_ptr->vrsave_mask == 0)
c4ad648e 14335 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
14336
14337 /* Because the Darwin register save/restore routines only handle
c4ad648e 14338 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 14339 check. */
37409796
NS
14340 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14341 && (info_ptr->first_altivec_reg_save
14342 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 14343 }
f676971a 14344 return;
d62294f5
FJ
14345}
14346
14347
00b960c7 14348static void
a2369ed3 14349is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
14350{
14351 bool *yes = (bool *) xyes;
14352 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14353 *yes = true;
14354}
14355
4697a36c
MM
14356\f
14357/* Calculate the stack information for the current function. This is
14358 complicated by having two separate calling sequences, the AIX calling
14359 sequence and the V.4 calling sequence.
14360
592696dd 14361 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 14362 32-bit 64-bit
4697a36c 14363 SP----> +---------------------------------------+
a260abc9 14364 | back chain to caller | 0 0
4697a36c 14365 +---------------------------------------+
a260abc9 14366 | saved CR | 4 8 (8-11)
4697a36c 14367 +---------------------------------------+
a260abc9 14368 | saved LR | 8 16
4697a36c 14369 +---------------------------------------+
a260abc9 14370 | reserved for compilers | 12 24
4697a36c 14371 +---------------------------------------+
a260abc9 14372 | reserved for binders | 16 32
4697a36c 14373 +---------------------------------------+
a260abc9 14374 | saved TOC pointer | 20 40
4697a36c 14375 +---------------------------------------+
a260abc9 14376 | Parameter save area (P) | 24 48
4697a36c 14377 +---------------------------------------+
a260abc9 14378 | Alloca space (A) | 24+P etc.
802a0058 14379 +---------------------------------------+
a7df97e6 14380 | Local variable space (L) | 24+P+A
4697a36c 14381 +---------------------------------------+
a7df97e6 14382 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 14383 +---------------------------------------+
00b960c7
AH
14384 | Save area for AltiVec registers (W) | 24+P+A+L+X
14385 +---------------------------------------+
14386 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
14387 +---------------------------------------+
14388 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 14389 +---------------------------------------+
00b960c7
AH
14390 	   | Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
14391 	   +---------------------------------------+
14392 	   | Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
4697a36c
MM
14393 +---------------------------------------+
14394 old SP->| back chain to caller's caller |
14395 +---------------------------------------+
14396
5376a30c
KR
14397 The required alignment for AIX configurations is two words (i.e., 8
14398 or 16 bytes).
14399
14400
4697a36c
MM
14401 V.4 stack frames look like:
14402
14403 SP----> +---------------------------------------+
14404 | back chain to caller | 0
14405 +---------------------------------------+
5eb387b8 14406 | caller's saved LR | 4
4697a36c
MM
14407 +---------------------------------------+
14408 | Parameter save area (P) | 8
14409 +---------------------------------------+
a7df97e6 14410 | Alloca space (A) | 8+P
f676971a 14411 +---------------------------------------+
a7df97e6 14412 | Varargs save area (V) | 8+P+A
f676971a 14413 +---------------------------------------+
a7df97e6 14414 | Local variable space (L) | 8+P+A+V
f676971a 14415 +---------------------------------------+
a7df97e6 14416 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 14417 +---------------------------------------+
00b960c7
AH
14418 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
14419 +---------------------------------------+
14420 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
14421 +---------------------------------------+
14422 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
14423 +---------------------------------------+
c4ad648e
AM
14424 | SPE: area for 64-bit GP registers |
14425 +---------------------------------------+
14426 | SPE alignment padding |
14427 +---------------------------------------+
00b960c7 14428 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 14429 +---------------------------------------+
00b960c7 14430 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 14431 +---------------------------------------+
00b960c7 14432 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
14433 +---------------------------------------+
14434 old SP->| back chain to caller's caller |
14435 +---------------------------------------+
b6c9286a 14436
5376a30c
KR
14437 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
14438 given. (But note below and in sysv4.h that we require only 8 and
14439   may round up the size of our stack frame anyway.  The historical
14440   reason is early versions of powerpc-linux, which didn't properly
14441 align the stack at program startup. A happy side-effect is that
14442 -mno-eabi libraries can be used with -meabi programs.)
14443
50d440bc 14444 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
14445 the stack alignment requirements may differ. If -mno-eabi is not
14446 given, the required stack alignment is 8 bytes; if -mno-eabi is
14447 given, the required alignment is 16 bytes. (But see V.4 comment
14448 above.) */
4697a36c 14449
61b2fbe7
MM
14450#ifndef ABI_STACK_BOUNDARY
14451#define ABI_STACK_BOUNDARY STACK_BOUNDARY
14452#endif
14453
d1d0c603 14454static rs6000_stack_t *
863d938c 14455rs6000_stack_info (void)
4697a36c 14456{
022123e6 14457 static rs6000_stack_t info;
4697a36c 14458 rs6000_stack_t *info_ptr = &info;
327e5343 14459 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 14460 int ehrd_size;
64045029 14461 int save_align;
8070c91a 14462 int first_gp;
44688022 14463 HOST_WIDE_INT non_fixed_size;
4697a36c 14464
022123e6 14465 memset (&info, 0, sizeof (info));
4697a36c 14466
c19de7aa
AH
14467 if (TARGET_SPE)
14468 {
14469 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 14470 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
14471 cfun->machine->insn_chain_scanned_p
14472 = spe_func_has_64bit_regs_p () + 1;
14473 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
14474 }
14475
a4f6c312 14476 /* Select which calling sequence. */
178274da 14477 info_ptr->abi = DEFAULT_ABI;
9878760c 14478
a4f6c312 14479 /* Calculate which registers need to be saved & save area size. */
4697a36c 14480 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 14481 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
14482 even if it currently looks like we won't. Reload may need it to
14483 get at a constant; if so, it will have already created a constant
14484 pool entry for it. */
2bfcf297 14485 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
14486 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14487 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
e3b5732b 14488 && crtl->uses_const_pool
1db02437 14489 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 14490 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 14491 else
8070c91a
DJ
14492 first_gp = info_ptr->first_gp_reg_save;
14493
14494 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 14495
a3170dc6
AH
14496  /* For the SPE, we have an additional upper 32 bits on each GPR.
14497     Ideally we should save the entire 64 bits only when the upper
14498 half is used in SIMD instructions. Since we only record
14499 registers live (not the size they are used in), this proves
14500 difficult because we'd have to traverse the instruction chain at
14501 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
14502     so we opt to always save the GPRs in 64 bits if even one register
14503     gets used in 64 bits.  Otherwise, all the registers in the frame
14504     get saved in 32 bits.
a3170dc6 14505
c19de7aa 14506     So, when we save all GPRs (except the SP) in 64 bits, the
14507     traditional GP save area will be empty.  */
c19de7aa 14508 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14509 info_ptr->gp_size = 0;
14510
4697a36c
MM
14511 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14512 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14513
00b960c7
AH
14514 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14515 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14516 - info_ptr->first_altivec_reg_save);
14517
592696dd 14518 /* Does this function call anything? */
71f123ca
FS
14519 info_ptr->calls_p = (! current_function_is_leaf
14520 || cfun->machine->ra_needs_full_frame);
b6c9286a 14521
a4f6c312 14522 /* Determine if we need to save the link register. */
022123e6 14523 if ((DEFAULT_ABI == ABI_AIX
e3b5732b 14524 && crtl->profile
022123e6 14525 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
14526#ifdef TARGET_RELOCATABLE
14527 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14528#endif
14529 || (info_ptr->first_fp_reg_save != 64
14530 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
e3b5732b 14531 || (DEFAULT_ABI == ABI_V4 && cfun->calls_alloca)
022123e6
AM
14532 || info_ptr->calls_p
14533 || rs6000_ra_ever_killed ())
4697a36c
MM
14534 {
14535 info_ptr->lr_save_p = 1;
1de43f85 14536 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
14537 }
14538
9ebbca7d 14539 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
14540 if (df_regs_ever_live_p (CR2_REGNO)
14541 || df_regs_ever_live_p (CR3_REGNO)
14542 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
14543 {
14544 info_ptr->cr_save_p = 1;
178274da 14545 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
14546 info_ptr->cr_size = reg_size;
14547 }
14548
83720594
RH
14549 /* If the current function calls __builtin_eh_return, then we need
14550 to allocate stack space for registers that will hold data for
14551 the exception handler. */
e3b5732b 14552 if (crtl->calls_eh_return)
83720594
RH
14553 {
14554 unsigned int i;
14555 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14556 continue;
a3170dc6
AH
14557
14558 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14559 ehrd_size = i * (TARGET_SPE_ABI
14560 && info_ptr->spe_64bit_regs_used != 0
14561 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14562 }
14563 else
14564 ehrd_size = 0;
14565
592696dd 14566 /* Determine various sizes. */
4697a36c
MM
14567 info_ptr->reg_size = reg_size;
14568 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14569 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
38173d38 14570 info_ptr->parm_size = RS6000_ALIGN (crtl->outgoing_args_size,
03e007d7 14571 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14572 if (FRAME_GROWS_DOWNWARD)
14573 info_ptr->vars_size
5b667039
JJ
14574 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14575 + info_ptr->parm_size,
7d5175e1 14576 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14577 - (info_ptr->fixed_size + info_ptr->vars_size
14578 + info_ptr->parm_size);
00b960c7 14579
c19de7aa 14580 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 14581 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
14582 else
14583 info_ptr->spe_gp_size = 0;
14584
4d774ff8
HP
14585 if (TARGET_ALTIVEC_ABI)
14586 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14587 else
4d774ff8
HP
14588 info_ptr->vrsave_mask = 0;
14589
14590 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14591 info_ptr->vrsave_size = 4;
14592 else
14593 info_ptr->vrsave_size = 0;
b6c9286a 14594
d62294f5
FJ
14595 compute_save_world_info (info_ptr);
14596
592696dd 14597 /* Calculate the offsets. */
178274da 14598 switch (DEFAULT_ABI)
4697a36c 14599 {
b6c9286a 14600 case ABI_NONE:
24d304eb 14601 default:
37409796 14602 gcc_unreachable ();
b6c9286a
MM
14603
14604 case ABI_AIX:
ee890fe2 14605 case ABI_DARWIN:
b6c9286a
MM
14606 info_ptr->fp_save_offset = - info_ptr->fp_size;
14607 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14608
14609 if (TARGET_ALTIVEC_ABI)
14610 {
14611 info_ptr->vrsave_save_offset
14612 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14613
982afe02 14614 /* Align stack so vector save area is on a quadword boundary.
9278121c 14615 The padding goes above the vectors. */
00b960c7
AH
14616 if (info_ptr->altivec_size != 0)
14617 info_ptr->altivec_padding_size
9278121c 14618 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14619 else
14620 info_ptr->altivec_padding_size = 0;
14621
14622 info_ptr->altivec_save_offset
14623 = info_ptr->vrsave_save_offset
14624 - info_ptr->altivec_padding_size
14625 - info_ptr->altivec_size;
9278121c
GK
14626 gcc_assert (info_ptr->altivec_size == 0
14627 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
14628
14629 /* Adjust for AltiVec case. */
14630 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14631 }
14632 else
14633 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14634 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14635 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
14636 break;
14637
14638 case ABI_V4:
b6c9286a
MM
14639 info_ptr->fp_save_offset = - info_ptr->fp_size;
14640 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14641 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14642
c19de7aa 14643 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14644 {
14645 /* Align stack so SPE GPR save area is aligned on a
14646 double-word boundary. */
f78c3290 14647 if (info_ptr->spe_gp_size != 0 && info_ptr->cr_save_offset != 0)
c4ad648e
AM
14648 info_ptr->spe_padding_size
14649 = 8 - (-info_ptr->cr_save_offset % 8);
14650 else
14651 info_ptr->spe_padding_size = 0;
14652
14653 info_ptr->spe_gp_save_offset
14654 = info_ptr->cr_save_offset
14655 - info_ptr->spe_padding_size
14656 - info_ptr->spe_gp_size;
14657
14658 /* Adjust for SPE case. */
022123e6 14659 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14660 }
a3170dc6 14661 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14662 {
14663 info_ptr->vrsave_save_offset
14664 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14665
14666 /* Align stack so vector save area is on a quadword boundary. */
14667 if (info_ptr->altivec_size != 0)
14668 info_ptr->altivec_padding_size
14669 = 16 - (-info_ptr->vrsave_save_offset % 16);
14670 else
14671 info_ptr->altivec_padding_size = 0;
14672
14673 info_ptr->altivec_save_offset
14674 = info_ptr->vrsave_save_offset
14675 - info_ptr->altivec_padding_size
14676 - info_ptr->altivec_size;
14677
14678 /* Adjust for AltiVec case. */
022123e6 14679 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14680 }
14681 else
022123e6
AM
14682 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14683 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14684 info_ptr->lr_save_offset = reg_size;
14685 break;
4697a36c
MM
14686 }
14687
64045029 14688 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14689 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14690 + info_ptr->gp_size
14691 + info_ptr->altivec_size
14692 + info_ptr->altivec_padding_size
a3170dc6
AH
14693 + info_ptr->spe_gp_size
14694 + info_ptr->spe_padding_size
00b960c7
AH
14695 + ehrd_size
14696 + info_ptr->cr_size
022123e6 14697 + info_ptr->vrsave_size,
64045029 14698 save_align);
00b960c7 14699
44688022 14700 non_fixed_size = (info_ptr->vars_size
ff381587 14701 + info_ptr->parm_size
5b667039 14702 + info_ptr->save_size);
ff381587 14703
44688022
AM
14704 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14705 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14706
14707 /* Determine if we need to allocate any stack frame:
14708
a4f6c312
SS
14709 For AIX we need to push the stack if a frame pointer is needed
14710 (because the stack might be dynamically adjusted), if we are
14711 debugging, if we make calls, or if the sum of fp_save, gp_save,
14712     and local variables is more than the space needed to save all
14713 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14714 + 18*8 = 288 (GPR13 reserved).
ff381587 14715
a4f6c312
SS
14716 For V.4 we don't have the stack cushion that AIX uses, but assume
14717 that the debugger can handle stackless frames. */
ff381587
MM
14718
14719 if (info_ptr->calls_p)
14720 info_ptr->push_p = 1;
14721
178274da 14722 else if (DEFAULT_ABI == ABI_V4)
44688022 14723 info_ptr->push_p = non_fixed_size != 0;
ff381587 14724
178274da
AM
14725 else if (frame_pointer_needed)
14726 info_ptr->push_p = 1;
14727
14728 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14729 info_ptr->push_p = 1;
14730
ff381587 14731 else
44688022 14732 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 14733
a4f6c312 14734 /* Zero offsets if we're not saving those registers. */
8dda1a21 14735 if (info_ptr->fp_size == 0)
4697a36c
MM
14736 info_ptr->fp_save_offset = 0;
14737
8dda1a21 14738 if (info_ptr->gp_size == 0)
4697a36c
MM
14739 info_ptr->gp_save_offset = 0;
14740
00b960c7
AH
14741 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14742 info_ptr->altivec_save_offset = 0;
14743
14744 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14745 info_ptr->vrsave_save_offset = 0;
14746
c19de7aa
AH
14747 if (! TARGET_SPE_ABI
14748 || info_ptr->spe_64bit_regs_used == 0
14749 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
14750 info_ptr->spe_gp_save_offset = 0;
14751
c81fc13e 14752 if (! info_ptr->lr_save_p)
4697a36c
MM
14753 info_ptr->lr_save_offset = 0;
14754
c81fc13e 14755 if (! info_ptr->cr_save_p)
4697a36c
MM
14756 info_ptr->cr_save_offset = 0;
14757
14758 return info_ptr;
14759}
14760
c19de7aa
AH
14761/* Return true if the current function uses any GPRs in 64-bit SIMD
14762 mode. */
14763
14764static bool
863d938c 14765spe_func_has_64bit_regs_p (void)
c19de7aa
AH
14766{
14767 rtx insns, insn;
14768
14769 /* Functions that save and restore all the call-saved registers will
14770 need to save/restore the registers in 64-bits. */
e3b5732b
JH
14771 if (crtl->calls_eh_return
14772 || cfun->calls_setjmp
14773 || crtl->has_nonlocal_goto)
c19de7aa
AH
14774 return true;
14775
14776 insns = get_insns ();
14777
14778 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
14779 {
14780 if (INSN_P (insn))
14781 {
14782 rtx i;
14783
b5a5beb9
AH
14784 /* FIXME: This should be implemented with attributes...
14785
14786 (set_attr "spe64" "true")....then,
14787 if (get_spe64(insn)) return true;
14788
14789 It's the only reliable way to do the stuff below. */
14790
c19de7aa 14791 i = PATTERN (insn);
f82f556d
AH
14792 if (GET_CODE (i) == SET)
14793 {
14794 enum machine_mode mode = GET_MODE (SET_SRC (i));
14795
14796 if (SPE_VECTOR_MODE (mode))
14797 return true;
4f011e1e 14798 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
f82f556d
AH
14799 return true;
14800 }
c19de7aa
AH
14801 }
14802 }
14803
14804 return false;
14805}
14806
d1d0c603 14807static void
a2369ed3 14808debug_stack_info (rs6000_stack_t *info)
9878760c 14809{
d330fd93 14810 const char *abi_string;
24d304eb 14811
c81fc13e 14812 if (! info)
4697a36c
MM
14813 info = rs6000_stack_info ();
14814
14815 fprintf (stderr, "\nStack information for function %s:\n",
14816 ((current_function_decl && DECL_NAME (current_function_decl))
14817 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
14818 : "<unknown>"));
14819
24d304eb
RK
14820 switch (info->abi)
14821 {
b6c9286a
MM
14822 default: abi_string = "Unknown"; break;
14823 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 14824 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 14825 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 14826 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
14827 }
14828
14829 fprintf (stderr, "\tABI = %5s\n", abi_string);
14830
00b960c7
AH
14831 if (TARGET_ALTIVEC_ABI)
14832 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
14833
a3170dc6
AH
14834 if (TARGET_SPE_ABI)
14835 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
14836
4697a36c
MM
14837 if (info->first_gp_reg_save != 32)
14838 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
14839
14840 if (info->first_fp_reg_save != 64)
14841 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 14842
00b960c7
AH
14843 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
14844 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
14845 info->first_altivec_reg_save);
14846
4697a36c
MM
14847 if (info->lr_save_p)
14848 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 14849
4697a36c
MM
14850 if (info->cr_save_p)
14851 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
14852
00b960c7
AH
14853 if (info->vrsave_mask)
14854 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
14855
4697a36c
MM
14856 if (info->push_p)
14857 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
14858
14859 if (info->calls_p)
14860 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
14861
4697a36c
MM
14862 if (info->gp_save_offset)
14863 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
14864
14865 if (info->fp_save_offset)
14866 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
14867
00b960c7
AH
14868 if (info->altivec_save_offset)
14869 fprintf (stderr, "\taltivec_save_offset = %5d\n",
14870 info->altivec_save_offset);
14871
a3170dc6
AH
14872 if (info->spe_gp_save_offset)
14873 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
14874 info->spe_gp_save_offset);
14875
00b960c7
AH
14876 if (info->vrsave_save_offset)
14877 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
14878 info->vrsave_save_offset);
14879
4697a36c
MM
14880 if (info->lr_save_offset)
14881 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
14882
14883 if (info->cr_save_offset)
14884 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
14885
14886 if (info->varargs_save_offset)
14887 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
14888
14889 if (info->total_size)
d1d0c603
JJ
14890 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14891 info->total_size);
4697a36c 14892
4697a36c 14893 if (info->vars_size)
d1d0c603
JJ
14894 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14895 info->vars_size);
4697a36c
MM
14896
14897 if (info->parm_size)
14898 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
14899
14900 if (info->fixed_size)
14901 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
14902
14903 if (info->gp_size)
14904 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
14905
a3170dc6
AH
14906 if (info->spe_gp_size)
14907 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
14908
4697a36c
MM
14909 if (info->fp_size)
14910 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
14911
00b960c7
AH
14912 if (info->altivec_size)
14913 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
14914
14915 if (info->vrsave_size)
14916 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
14917
14918 if (info->altivec_padding_size)
14919 fprintf (stderr, "\taltivec_padding_size= %5d\n",
14920 info->altivec_padding_size);
14921
a3170dc6
AH
14922 if (info->spe_padding_size)
14923 fprintf (stderr, "\tspe_padding_size = %5d\n",
14924 info->spe_padding_size);
14925
4697a36c
MM
14926 if (info->cr_size)
14927 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
14928
14929 if (info->save_size)
14930 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
14931
14932 if (info->reg_size != 4)
14933 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
14934
14935 fprintf (stderr, "\n");
9878760c 14936}
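/* A usage note (illustrative, with roughly formatted output): debug_stack_info
   is reached from rs6000_output_function_prologue when TARGET_DEBUG_STACK is
   set, which in practice means compiling with the rs6000 option -mdebug=stack.
   The dump is then written to stderr for every function, e.g.

	Stack information for function foo:
		ABI         =   V.4
		push_p      =     1
		calls_p     =     1

   Only nonzero fields are printed, as the guards above show.  */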
71f123ca
FS
14937
14938rtx
a2369ed3 14939rs6000_return_addr (int count, rtx frame)
71f123ca 14940{
a4f6c312
SS
14941 /* Currently we don't optimize very well between prolog and body
14942 code and for PIC code the code can be actually quite bad, so
14943 don't try to be too clever here. */
f1384257 14944 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
14945 {
14946 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
14947
14948 return
14949 gen_rtx_MEM
14950 (Pmode,
14951 memory_address
14952 (Pmode,
14953 plus_constant (copy_to_reg
14954 (gen_rtx_MEM (Pmode,
14955 memory_address (Pmode, frame))),
14956 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
14957 }
14958
8c29550d 14959 cfun->machine->ra_need_lr = 1;
1de43f85 14960 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
14961}
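/* A hypothetical source-level illustration: this function is what the port's
   RETURN_ADDR_RTX definition expands to, so with

	void *f (void) { return __builtin_return_address (0); }
	void *g (void) { return __builtin_return_address (1); }

   'f' can normally use the cheap path at the end (LR's initial value), while
   'g' arrives with COUNT != 0, takes the branch above that sets
   ra_needs_full_frame, and walks the back chain through memory.  */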
14962
5e1bf043
DJ
14963/* Say whether a function is a candidate for sibcall handling or not.
14964 We do not allow indirect calls to be optimized into sibling calls.
 14965 Also, we can't do it if there are any vector parameters; there is
 14966 nowhere to put the VRsave code needed to make that work. Note that functions with
14967 vector parameters are required to have a prototype, so the argument
14968 type info must be available here. (The tail recursion case can work
14969 with vector parameters, but there's no way to distinguish here.) */
4977bab6 14970static bool
a2369ed3 14971rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
14972{
14973 tree type;
4977bab6 14974 if (decl)
5e1bf043
DJ
14975 {
14976 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 14977 {
4977bab6 14978 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
14979 type; type = TREE_CHAIN (type))
14980 {
c15b529f 14981 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 14982 return false;
5e1bf043 14983 }
c4ad648e 14984 }
5e1bf043 14985 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
14986 || ((*targetm.binds_local_p) (decl)
14987 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 14988 {
4977bab6 14989 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
14990
14991 if (!lookup_attribute ("longcall", attr_list)
14992 || lookup_attribute ("shortcall", attr_list))
4977bab6 14993 return true;
2bcc50d0 14994 }
5e1bf043 14995 }
4977bab6 14996 return false;
5e1bf043
DJ
14997}
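/* A hypothetical source-level illustration of the vector-parameter rule above
   (assuming AltiVec and VRsave are enabled):

	int callee (vector int v);
	int caller (vector int v) { return callee (v); }

   Even though the call is in tail position, TYPE_ARG_TYPES for 'callee' has a
   VECTOR_TYPE entry, so the loop above returns false and no sibling call is
   generated.  */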
14998
e7e64a25
AS
 14999/* Return NULL if INSN is valid within a low-overhead loop.
 15000 Otherwise return a string describing why doloop cannot be applied.
9419649c
DE
15001 PowerPC uses the COUNT register for branch on table instructions. */
15002
e7e64a25 15003static const char *
3101faab 15004rs6000_invalid_within_doloop (const_rtx insn)
9419649c
DE
15005{
15006 if (CALL_P (insn))
e7e64a25 15007 return "Function call in the loop.";
9419649c
DE
15008
15009 if (JUMP_P (insn)
15010 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
15011 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 15012 return "Computed branch in the loop.";
9419649c 15013
e7e64a25 15014 return NULL;
9419649c
DE
15015}
15016
71f123ca 15017static int
863d938c 15018rs6000_ra_ever_killed (void)
71f123ca
FS
15019{
15020 rtx top;
5e1bf043
DJ
15021 rtx reg;
15022 rtx insn;
71f123ca 15023
e3b5732b 15024 if (crtl->is_thunk)
71f123ca 15025 return 0;
eb0424da 15026
36f7e964
AH
15027 /* regs_ever_live has LR marked as used if any sibcalls are present,
15028 but this should not force saving and restoring in the
15029 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 15030 clobbers LR, so that is inappropriate. */
36f7e964 15031
5e1bf043
DJ
15032 /* Also, the prologue can generate a store into LR that
15033 doesn't really count, like this:
36f7e964 15034
5e1bf043
DJ
15035 move LR->R0
15036 bcl to set PIC register
15037 move LR->R31
15038 move R0->LR
36f7e964
AH
15039
15040 When we're called from the epilogue, we need to avoid counting
15041 this as a store. */
f676971a 15042
71f123ca
FS
15043 push_topmost_sequence ();
15044 top = get_insns ();
15045 pop_topmost_sequence ();
1de43f85 15046 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 15047
5e1bf043
DJ
15048 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
15049 {
15050 if (INSN_P (insn))
15051 {
022123e6
AM
15052 if (CALL_P (insn))
15053 {
15054 if (!SIBLING_CALL_P (insn))
15055 return 1;
15056 }
1de43f85 15057 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 15058 return 1;
36f7e964
AH
15059 else if (set_of (reg, insn) != NULL_RTX
15060 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
15061 return 1;
15062 }
15063 }
15064 return 0;
71f123ca 15065}
4697a36c 15066\f
9ebbca7d 15067/* Emit instructions needed to load the TOC register.
c7ca610e 15068 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 15069 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
15070
15071void
a2369ed3 15072rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 15073{
6fb5fa3c 15074 rtx dest;
1db02437 15075 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 15076
7f970b70 15077 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 15078 {
7f970b70 15079 char buf[30];
e65a3857 15080 rtx lab, tmp1, tmp2, got;
7f970b70
AM
15081
15082 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15083 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15084 if (flag_pic == 2)
15085 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
15086 else
15087 got = rs6000_got_sym ();
15088 tmp1 = tmp2 = dest;
15089 if (!fromprolog)
15090 {
15091 tmp1 = gen_reg_rtx (Pmode);
15092 tmp2 = gen_reg_rtx (Pmode);
15093 }
6fb5fa3c
DB
15094 emit_insn (gen_load_toc_v4_PIC_1 (lab));
15095 emit_move_insn (tmp1,
1de43f85 15096 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
15097 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
15098 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
15099 }
15100 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
15101 {
6fb5fa3c 15102 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 15103 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
15104 }
15105 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
15106 {
15107 char buf[30];
20b71b17
AM
15108 rtx temp0 = (fromprolog
15109 ? gen_rtx_REG (Pmode, 0)
15110 : gen_reg_rtx (Pmode));
20b71b17 15111
20b71b17
AM
15112 if (fromprolog)
15113 {
ccbca5e4 15114 rtx symF, symL;
38c1f2d7 15115
20b71b17
AM
15116 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15117 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 15118
20b71b17
AM
15119 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
15120 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15121
6fb5fa3c
DB
15122 emit_insn (gen_load_toc_v4_PIC_1 (symF));
15123 emit_move_insn (dest,
1de43f85 15124 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 15125 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
15126 }
15127 else
20b71b17
AM
15128 {
15129 rtx tocsym;
20b71b17
AM
15130
15131 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
15132 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
15133 emit_move_insn (dest,
1de43f85 15134 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 15135 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 15136 }
6fb5fa3c 15137 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 15138 }
20b71b17
AM
15139 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
15140 {
15141 /* This is for AIX code running in non-PIC ELF32. */
15142 char buf[30];
15143 rtx realsym;
15144 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15145 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15146
6fb5fa3c
DB
15147 emit_insn (gen_elf_high (dest, realsym));
15148 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 15149 }
37409796 15150 else
9ebbca7d 15151 {
37409796 15152 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 15153
9ebbca7d 15154 if (TARGET_32BIT)
6fb5fa3c 15155 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 15156 else
6fb5fa3c 15157 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
15158 }
15159}
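/* A sketch of what the TARGET_SECURE_PLT arm above typically expands to for
   32-bit SVR4 -fPIC (label names and the use of r30 are only illustrative;
   for -fpic the GOT symbol is used instead of the TOC label):

	bcl 20,31,.LCF0
   .LCF0:
	mflr 30
	addis 30,30,(.LCTOC1-.LCF0)@ha
	addi 30,30,(.LCTOC1-.LCF0)@l

   i.e. the bcl deposits the address of .LCF0 in LR, and the TOC/GOT pointer
   is then formed as "PC + (TOC label - PC)" without storing any address in
   the text section.  */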
15160
d1d0c603
JJ
15161/* Emit instructions to restore the link register after determining where
15162 its value has been stored. */
15163
15164void
15165rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
15166{
15167 rs6000_stack_t *info = rs6000_stack_info ();
15168 rtx operands[2];
15169
15170 operands[0] = source;
15171 operands[1] = scratch;
15172
15173 if (info->lr_save_p)
15174 {
15175 rtx frame_rtx = stack_pointer_rtx;
15176 HOST_WIDE_INT sp_offset = 0;
15177 rtx tmp;
15178
15179 if (frame_pointer_needed
e3b5732b 15180 || cfun->calls_alloca
d1d0c603
JJ
15181 || info->total_size > 32767)
15182 {
0be76840 15183 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 15184 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
15185 frame_rtx = operands[1];
15186 }
15187 else if (info->push_p)
15188 sp_offset = info->total_size;
15189
15190 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 15191 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
15192 emit_move_insn (tmp, operands[0]);
15193 }
15194 else
1de43f85 15195 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
15196}
15197
4862826d 15198static GTY(()) alias_set_type set = -1;
f103e34d 15199
4862826d 15200alias_set_type
863d938c 15201get_TOC_alias_set (void)
9ebbca7d 15202{
f103e34d
GK
15203 if (set == -1)
15204 set = new_alias_set ();
15205 return set;
f676971a 15206}
9ebbca7d 15207
c1207243 15208/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
15209 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
15210 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 15211#if TARGET_ELF
3c9eb5f4 15212static int
f676971a 15213uses_TOC (void)
9ebbca7d 15214{
c4501e62 15215 rtx insn;
38c1f2d7 15216
c4501e62
JJ
15217 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
15218 if (INSN_P (insn))
15219 {
15220 rtx pat = PATTERN (insn);
15221 int i;
9ebbca7d 15222
f676971a 15223 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
15224 for (i = 0; i < XVECLEN (pat, 0); i++)
15225 {
15226 rtx sub = XVECEXP (pat, 0, i);
15227 if (GET_CODE (sub) == USE)
15228 {
15229 sub = XEXP (sub, 0);
15230 if (GET_CODE (sub) == UNSPEC
15231 && XINT (sub, 1) == UNSPEC_TOC)
15232 return 1;
15233 }
15234 }
15235 }
15236 return 0;
9ebbca7d 15237}
c954844a 15238#endif
38c1f2d7 15239
9ebbca7d 15240rtx
f676971a 15241create_TOC_reference (rtx symbol)
9ebbca7d 15242{
b3a13419 15243 if (!can_create_pseudo_p ())
6fb5fa3c 15244 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 15245 return gen_rtx_PLUS (Pmode,
a8a05998 15246 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a
EC
15247 gen_rtx_CONST (Pmode,
15248 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 15249 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 15250}
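/* As an illustration, for a 64-bit AIX-style TOC (where TOC_REGISTER is r2)
   the RTX returned above has the shape

	(plus:DI (reg:DI 2)
		 (const:DI (minus:DI (symbol_ref:DI ("some_sym"))
				     (symbol_ref:DI ("toc_label")))))

   where the second symbol_ref is toc_label_name; later passes and the output
   machinery treat the (minus ...) as the symbol's TOC-relative offset.  */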
38c1f2d7 15251
fc4767bb
JJ
15252/* If _Unwind_* has been called from within the same module,
15253 toc register is not guaranteed to be saved to 40(1) on function
15254 entry. Save it there in that case. */
c7ca610e 15255
9ebbca7d 15256void
863d938c 15257rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
15258{
15259 rtx mem;
15260 rtx stack_top = gen_reg_rtx (Pmode);
15261 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
15262 rtx opcode = gen_reg_rtx (SImode);
15263 rtx tocompare = gen_reg_rtx (SImode);
15264 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 15265
8308679f 15266 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
15267 emit_move_insn (stack_top, mem);
15268
8308679f
DE
15269 mem = gen_frame_mem (Pmode,
15270 gen_rtx_PLUS (Pmode, stack_top,
15271 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 15272 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
15273 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
15274 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 15275 : 0xE8410028, SImode));
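	  /* For reference: 0x80410014 decodes to "lwz r2,20(r1)" and
	     0xE8410028 to "ld r2,40(r1)" -- the TOC-restore instruction the
	     32-bit and 64-bit ABIs respectively place at the return address
	     of a cross-module call.  If that instruction is already there,
	     the caller reloads r2 itself and no extra save is needed;
	     otherwise r2 is stored into the reserved TOC slot below.  */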
9ebbca7d 15276
fc4767bb 15277 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 15278 SImode, NULL_RTX, NULL_RTX,
fc4767bb 15279 no_toc_save_needed);
9ebbca7d 15280
8308679f
DE
15281 mem = gen_frame_mem (Pmode,
15282 gen_rtx_PLUS (Pmode, stack_top,
15283 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
15284 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
15285 emit_label (no_toc_save_needed);
9ebbca7d 15286}
38c1f2d7 15287\f
0be76840
DE
15288/* This ties together stack memory (MEM with an alias set of frame_alias_set)
15289 and the change to the stack pointer. */
ba4828e0 15290
9ebbca7d 15291static void
863d938c 15292rs6000_emit_stack_tie (void)
9ebbca7d 15293{
0be76840
DE
15294 rtx mem = gen_frame_mem (BLKmode,
15295 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 15296
9ebbca7d
GK
15297 emit_insn (gen_stack_tie (mem));
15298}
38c1f2d7 15299
9ebbca7d
GK
15300/* Emit the correct code for allocating stack space, as insns.
15301 If COPY_R12, make sure a copy of the old frame is left in r12.
f78c3290
NF
15302 If COPY_R11, make sure a copy of the old frame is left in r11,
15303 in preference to r12 if COPY_R12.
9ebbca7d
GK
15304 The generated code may use hard register 0 as a temporary. */
15305
15306static void
f78c3290 15307rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12, int copy_r11)
38c1f2d7 15308{
9ebbca7d
GK
15309 rtx insn;
15310 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15311 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
15312 rtx todec = gen_int_mode (-size, Pmode);
15313
15314 if (INTVAL (todec) != -size)
15315 {
d4ee4d25 15316 warning (0, "stack frame too large");
61168ff1
RS
15317 emit_insn (gen_trap ());
15318 return;
15319 }
a157febd 15320
e3b5732b 15321 if (crtl->limit_stack)
a157febd
GK
15322 {
15323 if (REG_P (stack_limit_rtx)
f676971a 15324 && REGNO (stack_limit_rtx) > 1
a157febd
GK
15325 && REGNO (stack_limit_rtx) <= 31)
15326 {
5b71a4e7 15327 emit_insn (TARGET_32BIT
9ebbca7d
GK
15328 ? gen_addsi3 (tmp_reg,
15329 stack_limit_rtx,
15330 GEN_INT (size))
15331 : gen_adddi3 (tmp_reg,
15332 stack_limit_rtx,
15333 GEN_INT (size)));
5b71a4e7 15334
9ebbca7d
GK
15335 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15336 const0_rtx));
a157febd
GK
15337 }
15338 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 15339 && TARGET_32BIT
f607bc57 15340 && DEFAULT_ABI == ABI_V4)
a157febd 15341 {
9ebbca7d 15342 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
15343 gen_rtx_PLUS (Pmode,
15344 stack_limit_rtx,
9ebbca7d 15345 GEN_INT (size)));
5b71a4e7 15346
9ebbca7d
GK
15347 emit_insn (gen_elf_high (tmp_reg, toload));
15348 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
15349 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15350 const0_rtx));
a157febd
GK
15351 }
15352 else
d4ee4d25 15353 warning (0, "stack limit expression is not supported");
a157febd
GK
15354 }
15355
f78c3290
NF
15356 if (copy_r12 || copy_r11 || ! TARGET_UPDATE)
15357 emit_move_insn (copy_r11
15358 ? gen_rtx_REG (Pmode, 11)
15359 : gen_rtx_REG (Pmode, 12),
15360 stack_reg);
9ebbca7d 15361
38c1f2d7
MM
15362 if (TARGET_UPDATE)
15363 {
9ebbca7d 15364 if (size > 32767)
38c1f2d7 15365 {
9ebbca7d 15366 /* Need a note here so that try_split doesn't get confused. */
9390387d 15367 if (get_last_insn () == NULL_RTX)
2e040219 15368 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
15369 insn = emit_move_insn (tmp_reg, todec);
15370 try_split (PATTERN (insn), insn, 0);
15371 todec = tmp_reg;
38c1f2d7 15372 }
5b71a4e7
DE
15373
15374 insn = emit_insn (TARGET_32BIT
15375 ? gen_movsi_update (stack_reg, stack_reg,
15376 todec, stack_reg)
c4ad648e 15377 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 15378 todec, stack_reg));
38c1f2d7
MM
15379 }
15380 else
15381 {
5b71a4e7
DE
15382 insn = emit_insn (TARGET_32BIT
15383 ? gen_addsi3 (stack_reg, stack_reg, todec)
15384 : gen_adddi3 (stack_reg, stack_reg, todec));
9ebbca7d 15385 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
f78c3290
NF
15386 copy_r11
15387 ? gen_rtx_REG (Pmode, 11)
15388 : gen_rtx_REG (Pmode, 12));
9ebbca7d 15389 }
f676971a 15390
9ebbca7d 15391 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 15392 REG_NOTES (insn) =
9ebbca7d 15393 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 15394 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
15395 gen_rtx_PLUS (Pmode, stack_reg,
15396 GEN_INT (-size))),
15397 REG_NOTES (insn));
15398}
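/* A usage note (illustrative): limit_stack is in effect when the user passes
   -fstack-limit-register=REG or -fstack-limit-symbol=SYM.  For example,

	gcc -fstack-limit-register=r14 ...

   makes stack_limit_rtx be (reg 14), so the first branch above emits an add
   of SIZE to r14 into r0 and then a conditional trap that fires if r1 would
   drop below that limit after the allocation.  */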
15399
a4f6c312
SS
15400/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
15401 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
15402 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
15403 deduce these equivalences by itself so it wasn't necessary to hold
15404 its hand so much. */
9ebbca7d
GK
15405
15406static void
f676971a 15407rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 15408 rtx reg2, rtx rreg)
9ebbca7d
GK
15409{
15410 rtx real, temp;
15411
e56c4463
JL
15412 /* copy_rtx will not make unique copies of registers, so we need to
15413 ensure we don't have unwanted sharing here. */
15414 if (reg == reg2)
15415 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15416
15417 if (reg == rreg)
15418 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15419
9ebbca7d
GK
15420 real = copy_rtx (PATTERN (insn));
15421
89e7058f
AH
15422 if (reg2 != NULL_RTX)
15423 real = replace_rtx (real, reg2, rreg);
f676971a
EC
15424
15425 real = replace_rtx (real, reg,
9ebbca7d
GK
15426 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
15427 STACK_POINTER_REGNUM),
15428 GEN_INT (val)));
f676971a 15429
9ebbca7d
GK
15430 /* We expect that 'real' is either a SET or a PARALLEL containing
15431 SETs (and possibly other stuff). In a PARALLEL, all the SETs
15432 are important so they all have to be marked RTX_FRAME_RELATED_P. */
15433
15434 if (GET_CODE (real) == SET)
15435 {
15436 rtx set = real;
f676971a 15437
9ebbca7d
GK
15438 temp = simplify_rtx (SET_SRC (set));
15439 if (temp)
15440 SET_SRC (set) = temp;
15441 temp = simplify_rtx (SET_DEST (set));
15442 if (temp)
15443 SET_DEST (set) = temp;
15444 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 15445 {
9ebbca7d
GK
15446 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15447 if (temp)
15448 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 15449 }
38c1f2d7 15450 }
37409796 15451 else
9ebbca7d
GK
15452 {
15453 int i;
37409796
NS
15454
15455 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
15456 for (i = 0; i < XVECLEN (real, 0); i++)
15457 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
15458 {
15459 rtx set = XVECEXP (real, 0, i);
f676971a 15460
9ebbca7d
GK
15461 temp = simplify_rtx (SET_SRC (set));
15462 if (temp)
15463 SET_SRC (set) = temp;
15464 temp = simplify_rtx (SET_DEST (set));
15465 if (temp)
15466 SET_DEST (set) = temp;
15467 if (GET_CODE (SET_DEST (set)) == MEM)
15468 {
15469 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15470 if (temp)
15471 XEXP (SET_DEST (set), 0) = temp;
15472 }
15473 RTX_FRAME_RELATED_P (set) = 1;
15474 }
15475 }
c19de7aa 15476
9ebbca7d
GK
15477 RTX_FRAME_RELATED_P (insn) = 1;
15478 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15479 real,
15480 REG_NOTES (insn));
38c1f2d7
MM
15481}
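/* A sketch of the effect with illustrative values: if INSN stores r31
   through a temporary frame pointer, say

	(set (mem:SI (plus:SI (reg:SI 12) (const_int 8))) (reg:SI 31))

   and we are called with REG = (reg 12), VAL = -64, REG2 = NULL, the
   REG_FRAME_RELATED_EXPR note attached above describes the same store
   relative to the real stack pointer:

	(set (mem:SI (plus:SI (plus:SI (reg:SI 1) (const_int -64))
			      (const_int 8)))
	     (reg:SI 31))

   (simplify_rtx may fold the nested PLUS into a single offset), which is the
   form dwarf2out_frame_debug_expr needs to emit correct unwind info.  */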
15482
00b960c7
AH
15483/* Returns an insn that has a vrsave set operation with the
15484 appropriate CLOBBERs. */
15485
15486static rtx
a2369ed3 15487generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
15488{
15489 int nclobs, i;
15490 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 15491 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 15492
a004eb82
AH
15493 clobs[0]
15494 = gen_rtx_SET (VOIDmode,
15495 vrsave,
15496 gen_rtx_UNSPEC_VOLATILE (SImode,
15497 gen_rtvec (2, reg, vrsave),
3aca4bff 15498 UNSPECV_SET_VRSAVE));
00b960c7
AH
15499
15500 nclobs = 1;
15501
9aa86737
AH
15502 /* We need to clobber the registers in the mask so the scheduler
15503 does not move sets to VRSAVE before sets of AltiVec registers.
15504
15505 However, if the function receives nonlocal gotos, reload will set
15506 all call saved registers live. We will end up with:
15507
15508 (set (reg 999) (mem))
15509 (parallel [ (set (reg vrsave) (unspec blah))
15510 (clobber (reg 999))])
15511
15512 The clobber will cause the store into reg 999 to be dead, and
15513 flow will attempt to delete an epilogue insn. In this case, we
15514 need an unspec use/set of the register. */
00b960c7
AH
15515
15516 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 15517 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
15518 {
15519 if (!epiloguep || call_used_regs [i])
15520 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15521 gen_rtx_REG (V4SImode, i));
15522 else
15523 {
15524 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
15525
15526 clobs[nclobs++]
a004eb82
AH
15527 = gen_rtx_SET (VOIDmode,
15528 reg,
15529 gen_rtx_UNSPEC (V4SImode,
15530 gen_rtvec (1, reg), 27));
9aa86737
AH
15531 }
15532 }
00b960c7
AH
15533
15534 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15535
15536 for (i = 0; i < nclobs; ++i)
15537 XVECEXP (insn, 0, i) = clobs[i];
15538
15539 return insn;
15540}
15541
89e7058f
AH
15542/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15543 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
15544
15545static void
f676971a 15546emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 15547 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
15548{
15549 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15550 rtx replacea, replaceb;
15551
15552 int_rtx = GEN_INT (offset);
15553
15554 /* Some cases that need register indexed addressing. */
15555 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4f011e1e 15556 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
15557 || (TARGET_SPE_ABI
15558 && SPE_VECTOR_MODE (mode)
15559 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
15560 {
 15561 /* Whoever calls us must make sure r11 is available in the
c4ad648e 15562 flow path of instructions in the prologue. */
89e7058f
AH
15563 offset_rtx = gen_rtx_REG (Pmode, 11);
15564 emit_move_insn (offset_rtx, int_rtx);
15565
15566 replacea = offset_rtx;
15567 replaceb = int_rtx;
15568 }
15569 else
15570 {
15571 offset_rtx = int_rtx;
15572 replacea = NULL_RTX;
15573 replaceb = NULL_RTX;
15574 }
15575
15576 reg = gen_rtx_REG (mode, regno);
15577 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 15578 mem = gen_frame_mem (mode, addr);
89e7058f
AH
15579
15580 insn = emit_move_insn (mem, reg);
15581
15582 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15583}
15584
a3170dc6
AH
15585/* Emit an offset memory reference suitable for a frame store, while
15586 converting to a valid addressing mode. */
15587
15588static rtx
a2369ed3 15589gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
15590{
15591 rtx int_rtx, offset_rtx;
15592
15593 int_rtx = GEN_INT (offset);
15594
4d4cbc0e 15595 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
4f011e1e 15596 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
15597 {
15598 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15599 emit_move_insn (offset_rtx, int_rtx);
15600 }
15601 else
15602 offset_rtx = int_rtx;
15603
0be76840 15604 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
15605}
15606
6d0a8091
DJ
 15607/* Look for user-defined global regs. We should not save and restore these,
 15608 and cannot use stmw/lmw if there are any in the range being saved or restored. */
15609
15610static bool
f78c3290 15611no_global_regs_above (int first, bool gpr)
6d0a8091
DJ
15612{
15613 int i;
f78c3290
NF
 15614 for (i = first; i < (gpr ? 32 : 64); i++)
15615 if (global_regs[i])
6d0a8091
DJ
15616 return false;
15617 return true;
15618}
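/* A hypothetical example of what this guards against: a translation unit
   containing the file-scope global register variable

	register long *chain asm ("r30");

   makes global_regs[30] nonzero, so no_global_regs_above (29, true) returns
   false and the save/restore code falls back to inline, per-register saves
   instead of stmw/lmw or the out-of-line routines.  */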
15619
699c914a
MS
15620#ifndef TARGET_FIX_AND_CONTINUE
15621#define TARGET_FIX_AND_CONTINUE 0
15622#endif
15623
f78c3290
NF
15624/* It's really GPR 13 and FPR 14, but we need the smaller of the two. */
15625#define FIRST_SAVRES_REGISTER FIRST_SAVED_GP_REGNO
15626#define LAST_SAVRES_REGISTER 31
15627#define N_SAVRES_REGISTERS (LAST_SAVRES_REGISTER - FIRST_SAVRES_REGISTER + 1)
15628
15629static GTY(()) rtx savres_routine_syms[N_SAVRES_REGISTERS][8];
15630
15631/* Return the symbol for an out-of-line register save/restore routine.
15632 We are saving/restoring GPRs if GPR is true. */
15633
15634static rtx
15635rs6000_savres_routine_sym (rs6000_stack_t *info, bool savep, bool gpr, bool exitp)
15636{
15637 int regno = gpr ? info->first_gp_reg_save : (info->first_fp_reg_save - 32);
15638 rtx sym;
15639 int select = ((savep ? 1 : 0) << 2
15640 | (gpr
15641 /* On the SPE, we never have any FPRs, but we do have
15642 32/64-bit versions of the routines. */
15643 ? (TARGET_SPE_ABI && info->spe_64bit_regs_used ? 1 : 0)
15644 : 0) << 1
15645 | (exitp ? 1: 0));
15646
15647 /* Don't generate bogus routine names. */
15648 gcc_assert (FIRST_SAVRES_REGISTER <= regno && regno <= LAST_SAVRES_REGISTER);
15649
15650 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select];
15651
15652 if (sym == NULL)
15653 {
15654 char name[30];
15655 const char *action;
15656 const char *regkind;
15657 const char *exit_suffix;
15658
15659 action = savep ? "save" : "rest";
15660
15661 /* SPE has slightly different names for its routines depending on
15662 whether we are saving 32-bit or 64-bit registers. */
15663 if (TARGET_SPE_ABI)
15664 {
15665 /* No floating point saves on the SPE. */
15666 gcc_assert (gpr);
15667
15668 regkind = info->spe_64bit_regs_used ? "64gpr" : "32gpr";
15669 }
15670 else
15671 regkind = gpr ? "gpr" : "fpr";
15672
15673 exit_suffix = exitp ? "_x" : "";
15674
15675 sprintf (name, "_%s%s_%d%s", action, regkind, regno, exit_suffix);
15676
15677 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select]
15678 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
15679 }
15680
15681 return sym;
15682}
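/* Illustrative examples of the names constructed by the sprintf above:

	savep=true,  gpr=true,  first_gp_reg_save=29, exitp=false -> "_savegpr_29"
	savep=false, gpr=true,  first_gp_reg_save=29, exitp=true  -> "_restgpr_29_x"
	savep=true,  gpr=false, first_fp_reg_save=60, exitp=false -> "_savefpr_28"
	SPE ABI with 64-bit registers in use, savep=true           -> "_save64gpr_29"

   The FPR case uses first_fp_reg_save - 32, which is why 60 becomes 28.  */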
15683
15684/* Emit a sequence of insns, including a stack tie if needed, for
15685 resetting the stack pointer. If SAVRES is true, then don't reset the
15686 stack pointer, but move the base of the frame into r11 for use by
15687 out-of-line register restore routines. */
15688
15689static void
15690rs6000_emit_stack_reset (rs6000_stack_t *info,
15691 rtx sp_reg_rtx, rtx frame_reg_rtx,
15692 int sp_offset, bool savres)
15693{
15694 /* This blockage is needed so that sched doesn't decide to move
15695 the sp change before the register restores. */
15696 if (frame_reg_rtx != sp_reg_rtx
15697 || (TARGET_SPE_ABI
15698 && info->spe_64bit_regs_used != 0
15699 && info->first_gp_reg_save != 32))
15700 rs6000_emit_stack_tie ();
15701
15702 if (frame_reg_rtx != sp_reg_rtx)
15703 {
15704 rs6000_emit_stack_tie ();
15705 if (sp_offset != 0)
15706 emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
15707 GEN_INT (sp_offset)));
15708 else if (!savres)
15709 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
15710 }
15711 else if (sp_offset != 0)
15712 {
15713 /* If we are restoring registers out-of-line, we will be using the
15714 "exit" variants of the restore routines, which will reset the
15715 stack for us. But we do need to point r11 into the right place
15716 for those routines. */
15717 rtx dest_reg = (savres
15718 ? gen_rtx_REG (Pmode, 11)
15719 : sp_reg_rtx);
15720
15721 emit_insn (TARGET_32BIT
15722 ? gen_addsi3 (dest_reg, sp_reg_rtx,
15723 GEN_INT (sp_offset))
15724 : gen_adddi3 (dest_reg, sp_reg_rtx,
15725 GEN_INT (sp_offset)));
15726 }
15727}
15728
15729/* Construct a parallel rtx describing the effect of a call to an
15730 out-of-line register save/restore routine. */
15731
15732static rtx
15733rs6000_make_savres_rtx (rs6000_stack_t *info,
15734 rtx frame_reg_rtx, int save_area_offset,
15735 enum machine_mode reg_mode,
15736 bool savep, bool gpr, bool exitp)
15737{
15738 int i;
15739 int offset, start_reg, end_reg, n_regs;
15740 int reg_size = GET_MODE_SIZE (reg_mode);
15741 rtx sym;
15742 rtvec p;
15743
15744 offset = 0;
15745 start_reg = (gpr
15746 ? info->first_gp_reg_save
15747 : info->first_fp_reg_save);
15748 end_reg = gpr ? 32 : 64;
15749 n_regs = end_reg - start_reg;
15750 p = rtvec_alloc ((exitp ? 4 : 3) + n_regs);
15751
15752 /* If we're saving registers, then we should never say we're exiting. */
15753 gcc_assert ((savep && !exitp) || !savep);
15754
15755 if (exitp)
15756 RTVEC_ELT (p, offset++) = gen_rtx_RETURN (VOIDmode);
15757
15758 RTVEC_ELT (p, offset++)
15759 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 65));
15760
15761 sym = rs6000_savres_routine_sym (info, savep, gpr, exitp);
15762 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, sym);
15763 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 11));
15764
15765 for (i = 0; i < end_reg - start_reg; i++)
15766 {
15767 rtx addr, reg, mem;
15768 reg = gen_rtx_REG (reg_mode, start_reg + i);
15769 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15770 GEN_INT (save_area_offset + reg_size*i));
15771 mem = gen_frame_mem (reg_mode, addr);
15772
15773 RTVEC_ELT (p, i + offset) = gen_rtx_SET (VOIDmode,
15774 savep ? mem : reg,
15775 savep ? reg : mem);
15776 }
15777
15778 return gen_rtx_PARALLEL (VOIDmode, p);
15779}
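/* The PARALLEL built above has this general shape (a sketch for a GPR "exit"
   restore starting at r29, assuming 4-byte registers; offsets and modes
   follow REG_MODE):

	(parallel [(return)
		   (clobber (reg:P 65))		;; LR, clobbered by the call
		   (use (symbol_ref "_restgpr_29_x"))
		   (use (reg:P 11))		;; base of the save area
		   (set (reg 29) (mem (plus (reg FRAME) (const_int off))))
		   (set (reg 30) (mem (plus (reg FRAME) (const_int off+4))))
		   (set (reg 31) (mem (plus (reg FRAME) (const_int off+8))))])

   For a save the SETs run the other way (memory on the left), and the
   (return) element is omitted when EXITP is false.  */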
15780
52ff33d0
NF
15781/* Determine whether the gp REG is really used. */
15782
15783static bool
15784rs6000_reg_live_or_pic_offset_p (int reg)
15785{
6fb5fa3c 15786 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
15787 && (!call_used_regs[reg]
15788 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15789 && TARGET_TOC && TARGET_MINIMAL_TOC)))
15790 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15791 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
15792 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
15793}
15794
f78c3290
NF
15795enum {
15796 SAVRES_MULTIPLE = 0x1,
15797 SAVRES_INLINE_FPRS = 0x2,
15798 SAVRES_INLINE_GPRS = 0x4
15799};
15800
 15801/* Determine the strategy for saving/restoring registers. */
15802
15803static int
15804rs6000_savres_strategy (rs6000_stack_t *info, bool savep,
15805 int using_static_chain_p, int sibcall)
15806{
15807 bool using_multiple_p;
15808 bool common;
15809 bool savres_fprs_inline;
15810 bool savres_gprs_inline;
15811 bool noclobber_global_gprs
15812 = no_global_regs_above (info->first_gp_reg_save, /*gpr=*/true);
15813
15814 using_multiple_p = (TARGET_MULTIPLE && ! TARGET_POWERPC64
15815 && (!TARGET_SPE_ABI
15816 || info->spe_64bit_regs_used == 0)
15817 && info->first_gp_reg_save < 31
15818 && noclobber_global_gprs);
15819 /* Don't bother to try to save things out-of-line if r11 is occupied
15820 by the static chain. It would require too much fiddling and the
15821 static chain is rarely used anyway. */
15822 common = (using_static_chain_p
15823 || sibcall
15824 || crtl->calls_eh_return
15825 || !info->lr_save_p
15826 || cfun->machine->ra_need_lr
15827 || info->total_size > 32767);
15828 savres_fprs_inline = (common
15829 || info->first_fp_reg_save == 64
15830 || !no_global_regs_above (info->first_fp_reg_save,
15831 /*gpr=*/false)
15832 || FP_SAVE_INLINE (info->first_fp_reg_save));
15833 savres_gprs_inline = (common
15834 /* Saving CR interferes with the exit routines
15835 used on the SPE, so just punt here. */
15836 || (!savep
15837 && TARGET_SPE_ABI
15838 && info->spe_64bit_regs_used != 0
15839 && info->cr_save_p != 0)
15840 || info->first_gp_reg_save == 32
15841 || !noclobber_global_gprs
15842 || GP_SAVE_INLINE (info->first_gp_reg_save));
15843
15844 if (savep)
15845 /* If we are going to use store multiple, then don't even bother
15846 with the out-of-line routines, since the store-multiple instruction
15847 will always be smaller. */
15848 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
15849 else
15850 {
15851 /* The situation is more complicated with load multiple. We'd
15852 prefer to use the out-of-line routines for restores, since the
15853 "exit" out-of-line routines can handle the restore of LR and
15854 the frame teardown. But we can only use the out-of-line
15855 routines if we know that we've used store multiple or
15856 out-of-line routines in the prologue, i.e. if we've saved all
15857 the registers from first_gp_reg_save. Otherwise, we risk
15858 loading garbage from the stack. Furthermore, we can only use
15859 the "exit" out-of-line gpr restore if we haven't saved any
15860 fprs. */
15861 bool saved_all = !savres_gprs_inline || using_multiple_p;
15862
15863 if (saved_all && info->first_fp_reg_save != 64)
15864 /* We can't use the exit routine; use load multiple if it's
15865 available. */
15866 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
15867 }
15868
15869 return (using_multiple_p
15870 | (savres_fprs_inline << 1)
15871 | (savres_gprs_inline << 2));
15872}
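/* The caller decodes the returned bit mask with the SAVRES_* flags, as the
   prologue code below does:

	strategy = rs6000_savres_strategy (info, /*savep=*/true,
					   using_static_chain_p, /*sibcall=*/0);
	using_store_multiple = strategy & SAVRES_MULTIPLE;
	saving_FPRs_inline   = strategy & SAVRES_INLINE_FPRS;
	saving_GPRs_inline   = strategy & SAVRES_INLINE_GPRS;  */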
15873
9ebbca7d
GK
15874/* Emit function prologue as insns. */
15875
9878760c 15876void
863d938c 15877rs6000_emit_prologue (void)
9878760c 15878{
4697a36c 15879 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 15880 enum machine_mode reg_mode = Pmode;
327e5343 15881 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
15882 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15883 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
15884 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 15885 rtx cr_save_rtx = NULL_RTX;
9ebbca7d 15886 rtx insn;
f78c3290 15887 int strategy;
9ebbca7d 15888 int saving_FPRs_inline;
f78c3290 15889 int saving_GPRs_inline;
9ebbca7d 15890 int using_store_multiple;
f78c3290
NF
15891 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
15892 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
15893 && !call_used_regs[STATIC_CHAIN_REGNUM]);
9ebbca7d 15894 HOST_WIDE_INT sp_offset = 0;
f676971a 15895
699c914a
MS
15896 if (TARGET_FIX_AND_CONTINUE)
15897 {
15898 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 15899 address by modifying the first 5 instructions of the function
699c914a
MS
15900 to branch to the overriding function. This is necessary to
15901 permit function pointers that point to the old function to
15902 actually forward to the new function. */
15903 emit_insn (gen_nop ());
15904 emit_insn (gen_nop ());
de2ab0ca 15905 emit_insn (gen_nop ());
699c914a
MS
15906 emit_insn (gen_nop ());
15907 emit_insn (gen_nop ());
15908 }
15909
15910 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
15911 {
15912 reg_mode = V2SImode;
15913 reg_size = 8;
15914 }
a3170dc6 15915
f78c3290
NF
15916 strategy = rs6000_savres_strategy (info, /*savep=*/true,
15917 /*static_chain_p=*/using_static_chain_p,
15918 /*sibcall=*/0);
15919 using_store_multiple = strategy & SAVRES_MULTIPLE;
15920 saving_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
15921 saving_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
9ebbca7d
GK
15922
15923 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
15924 if (! WORLD_SAVE_P (info)
15925 && info->push_p
acd0b319 15926 && (DEFAULT_ABI == ABI_V4
e3b5732b 15927 || crtl->calls_eh_return))
9ebbca7d 15928 {
f78c3290
NF
15929 bool need_r11 = (TARGET_SPE
15930 ? (!saving_GPRs_inline
15931 && info->spe_64bit_regs_used == 0)
15932 : (!saving_FPRs_inline || !saving_GPRs_inline));
9ebbca7d
GK
15933 if (info->total_size < 32767)
15934 sp_offset = info->total_size;
15935 else
f78c3290
NF
15936 frame_reg_rtx = (need_r11
15937 ? gen_rtx_REG (Pmode, 11)
15938 : frame_ptr_rtx);
f676971a 15939 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
15940 (frame_reg_rtx != sp_reg_rtx
15941 && (info->cr_save_p
15942 || info->lr_save_p
15943 || info->first_fp_reg_save < 64
15944 || info->first_gp_reg_save < 32
f78c3290
NF
15945 )),
15946 need_r11);
9ebbca7d
GK
15947 if (frame_reg_rtx != sp_reg_rtx)
15948 rs6000_emit_stack_tie ();
15949 }
15950
d62294f5 15951 /* Handle world saves specially here. */
f57fe068 15952 if (WORLD_SAVE_P (info))
d62294f5
FJ
15953 {
15954 int i, j, sz;
15955 rtx treg;
15956 rtvec p;
22fa69da 15957 rtx reg0;
d62294f5
FJ
15958
15959 /* save_world expects lr in r0. */
22fa69da 15960 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 15961 if (info->lr_save_p)
c4ad648e 15962 {
22fa69da 15963 insn = emit_move_insn (reg0,
1de43f85 15964 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
15965 RTX_FRAME_RELATED_P (insn) = 1;
15966 }
d62294f5
FJ
15967
15968 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 15969 assumptions about the offsets of various bits of the stack
992d08b1 15970 frame. */
37409796
NS
15971 gcc_assert (info->gp_save_offset == -220
15972 && info->fp_save_offset == -144
15973 && info->lr_save_offset == 8
15974 && info->cr_save_offset == 4
15975 && info->push_p
15976 && info->lr_save_p
e3b5732b 15977 && (!crtl->calls_eh_return
37409796
NS
15978 || info->ehrd_offset == -432)
15979 && info->vrsave_save_offset == -224
22fa69da 15980 && info->altivec_save_offset == -416);
d62294f5
FJ
15981
15982 treg = gen_rtx_REG (SImode, 11);
15983 emit_move_insn (treg, GEN_INT (-info->total_size));
15984
15985 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 15986 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
15987
15988 /* Preserve CR2 for save_world prologues */
22fa69da 15989 sz = 5;
d62294f5
FJ
15990 sz += 32 - info->first_gp_reg_save;
15991 sz += 64 - info->first_fp_reg_save;
15992 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
15993 p = rtvec_alloc (sz);
15994 j = 0;
15995 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 15996 gen_rtx_REG (SImode,
1de43f85 15997 LR_REGNO));
d62294f5 15998 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
15999 gen_rtx_SYMBOL_REF (Pmode,
16000 "*save_world"));
d62294f5 16001 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
16002 properly. */
16003 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16004 {
16005 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
16006 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16007 GEN_INT (info->fp_save_offset
16008 + sp_offset + 8 * i));
0be76840 16009 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
16010
16011 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16012 }
d62294f5 16013 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16014 {
16015 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16016 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16017 GEN_INT (info->altivec_save_offset
16018 + sp_offset + 16 * i));
0be76840 16019 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16020
16021 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16022 }
d62294f5 16023 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16024 {
16025 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16026 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16027 GEN_INT (info->gp_save_offset
16028 + sp_offset + reg_size * i));
0be76840 16029 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16030
16031 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16032 }
16033
16034 {
16035 /* CR register traditionally saved as CR2. */
16036 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16037 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16038 GEN_INT (info->cr_save_offset
16039 + sp_offset));
0be76840 16040 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16041
16042 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16043 }
22fa69da
GK
16044 /* Explain about use of R0. */
16045 if (info->lr_save_p)
16046 {
16047 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16048 GEN_INT (info->lr_save_offset
16049 + sp_offset));
16050 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 16051
22fa69da
GK
16052 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
16053 }
16054 /* Explain what happens to the stack pointer. */
16055 {
16056 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
16057 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
16058 }
d62294f5
FJ
16059
16060 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16061 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
16062 treg, GEN_INT (-info->total_size));
16063 sp_offset = info->total_size;
d62294f5
FJ
16064 }
16065
9ebbca7d 16066 /* If we use the link register, get it into r0. */
f57fe068 16067 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 16068 {
52ff33d0
NF
16069 rtx addr, reg, mem;
16070
f8a57be8 16071 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 16072 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 16073 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
16074
16075 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16076 GEN_INT (info->lr_save_offset + sp_offset));
16077 reg = gen_rtx_REG (Pmode, 0);
16078 mem = gen_rtx_MEM (Pmode, addr);
16079 /* This should not be of rs6000_sr_alias_set, because of
16080 __builtin_return_address. */
16081
16082 insn = emit_move_insn (mem, reg);
16083 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16084 NULL_RTX, NULL_RTX);
f8a57be8 16085 }
9ebbca7d
GK
16086
16087 /* If we need to save CR, put it into r12. */
f57fe068 16088 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 16089 {
f8a57be8 16090 rtx set;
f676971a 16091
9ebbca7d 16092 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
16093 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16094 RTX_FRAME_RELATED_P (insn) = 1;
16095 /* Now, there's no way that dwarf2out_frame_debug_expr is going
16096 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
16097 But that's OK. All we have to do is specify that _one_ condition
16098 code register is saved in this stack slot. The thrower's epilogue
16099 will then restore all the call-saved registers.
16100 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
16101 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
16102 gen_rtx_REG (SImode, CR2_REGNO));
16103 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
16104 set,
16105 REG_NOTES (insn));
9ebbca7d
GK
16106 }
16107
a4f6c312
SS
 16108 /* Do any required saving of fpr's. If we are saving them inline, emit
 16109 the stores here. Otherwise, an out-of-line routine is called below. */
f57fe068 16110 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
16111 {
16112 int i;
16113 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16114 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 16115 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
16116 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
16117 info->first_fp_reg_save + i,
16118 info->fp_save_offset + sp_offset + 8 * i,
16119 info->total_size);
9ebbca7d 16120 }
f57fe068 16121 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
f78c3290
NF
16122 {
16123 rtx par;
16124
16125 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16126 info->fp_save_offset + sp_offset,
16127 DFmode,
16128 /*savep=*/true, /*gpr=*/false,
16129 /*exitp=*/false);
16130 insn = emit_insn (par);
16131 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16132 NULL_RTX, NULL_RTX);
16133 }
16134
16135 /* Save GPRs. This is done as a PARALLEL if we are using
16136 the store-multiple instructions. */
16137 if (!WORLD_SAVE_P (info)
16138 && TARGET_SPE_ABI
16139 && info->spe_64bit_regs_used != 0
16140 && info->first_gp_reg_save != 32)
9ebbca7d
GK
16141 {
16142 int i;
f78c3290
NF
16143 rtx spe_save_area_ptr;
16144
16145 /* Determine whether we can address all of the registers that need
16146 to be saved with an offset from the stack pointer that fits in
16147 the small const field for SPE memory instructions. */
16148 int spe_regs_addressable_via_sp
16149 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16150 + (32 - info->first_gp_reg_save - 1) * reg_size)
16151 && saving_GPRs_inline);
16152 int spe_offset;
16153
16154 if (spe_regs_addressable_via_sp)
16155 {
16156 spe_save_area_ptr = frame_reg_rtx;
16157 spe_offset = info->spe_gp_save_offset + sp_offset;
16158 }
16159 else
16160 {
16161 /* Make r11 point to the start of the SPE save area. We need
16162 to be careful here if r11 is holding the static chain. If
16163 it is, then temporarily save it in r0. We would use r0 as
16164 our base register here, but using r0 as a base register in
16165 loads and stores means something different from what we
16166 would like. */
16167 int ool_adjust = (saving_GPRs_inline
16168 ? 0
16169 : (info->first_gp_reg_save
16170 - (FIRST_SAVRES_REGISTER+1))*8);
16171 HOST_WIDE_INT offset = (info->spe_gp_save_offset
16172 + sp_offset - ool_adjust);
16173
16174 if (using_static_chain_p)
16175 {
16176 rtx r0 = gen_rtx_REG (Pmode, 0);
16177 gcc_assert (info->first_gp_reg_save > 11);
16178
16179 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
16180 }
16181
16182 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
16183 insn = emit_insn (gen_addsi3 (spe_save_area_ptr,
16184 frame_reg_rtx,
16185 GEN_INT (offset)));
16186 /* We need to make sure the move to r11 gets noted for
16187 properly outputting unwind information. */
16188 if (!saving_GPRs_inline)
16189 rs6000_frame_related (insn, frame_reg_rtx, offset,
16190 NULL_RTX, NULL_RTX);
16191 spe_offset = 0;
16192 }
16193
16194 if (saving_GPRs_inline)
16195 {
16196 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16197 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16198 {
16199 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16200 rtx offset, addr, mem;
f676971a 16201
f78c3290
NF
16202 /* We're doing all this to ensure that the offset fits into
16203 the immediate offset of 'evstdd'. */
16204 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
16205
16206 offset = GEN_INT (reg_size * i + spe_offset);
16207 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
16208 mem = gen_rtx_MEM (V2SImode, addr);
16209
16210 insn = emit_move_insn (mem, reg);
16211
16212 rs6000_frame_related (insn, spe_save_area_ptr,
16213 info->spe_gp_save_offset
16214 + sp_offset + reg_size * i,
16215 offset, const0_rtx);
16216 }
16217 }
16218 else
9ebbca7d 16219 {
f78c3290 16220 rtx par;
9ebbca7d 16221
f78c3290
NF
16222 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
16223 0, reg_mode,
16224 /*savep=*/true, /*gpr=*/true,
16225 /*exitp=*/false);
16226 insn = emit_insn (par);
16227 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16228 NULL_RTX, NULL_RTX);
9ebbca7d 16229 }
f78c3290
NF
16230
16231
16232 /* Move the static chain pointer back. */
16233 if (using_static_chain_p && !spe_regs_addressable_via_sp)
16234 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
16235 }
16236 else if (!WORLD_SAVE_P (info) && !saving_GPRs_inline)
16237 {
16238 rtx par;
16239
16240 /* Need to adjust r11 if we saved any FPRs. */
16241 if (info->first_fp_reg_save != 64)
16242 {
16243 rtx r11 = gen_rtx_REG (reg_mode, 11);
16244 rtx offset = GEN_INT (info->total_size
16245 + (-8 * (64-info->first_fp_reg_save)));
16246 rtx ptr_reg = (sp_reg_rtx == frame_reg_rtx
16247 ? sp_reg_rtx : r11);
16248
16249 emit_insn (TARGET_32BIT
16250 ? gen_addsi3 (r11, ptr_reg, offset)
16251 : gen_adddi3 (r11, ptr_reg, offset));
16252 }
16253
16254 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16255 info->gp_save_offset + sp_offset,
16256 reg_mode,
16257 /*savep=*/true, /*gpr=*/true,
16258 /*exitp=*/false);
16259 insn = emit_insn (par);
f676971a 16260 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
16261 NULL_RTX, NULL_RTX);
16262 }
f78c3290 16263 else if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 16264 {
308c142a 16265 rtvec p;
9ebbca7d
GK
16266 int i;
16267 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
16268 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16269 {
16270 rtx addr, reg, mem;
16271 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
16272 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16273 GEN_INT (info->gp_save_offset
16274 + sp_offset
9ebbca7d 16275 + reg_size * i));
0be76840 16276 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
16277
16278 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
16279 }
16280 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 16281 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 16282 NULL_RTX, NULL_RTX);
b6c9286a 16283 }
f57fe068 16284 else if (!WORLD_SAVE_P (info))
b6c9286a 16285 {
9ebbca7d
GK
16286 int i;
16287 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
16288 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16289 {
16290 rtx addr, reg, mem;
16291 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 16292
52ff33d0
NF
16293 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16294 GEN_INT (info->gp_save_offset
16295 + sp_offset
16296 + reg_size * i));
16297 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 16298
52ff33d0
NF
16299 insn = emit_move_insn (mem, reg);
16300 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16301 NULL_RTX, NULL_RTX);
16302 }
9ebbca7d
GK
16303 }
16304
83720594
RH
16305 /* ??? There's no need to emit actual instructions here, but it's the
16306 easiest way to get the frame unwind information emitted. */
e3b5732b 16307 if (crtl->calls_eh_return)
83720594 16308 {
78e1b90d
DE
16309 unsigned int i, regno;
16310
fc4767bb
JJ
16311 /* In AIX ABI we need to pretend we save r2 here. */
16312 if (TARGET_AIX)
16313 {
16314 rtx addr, reg, mem;
16315
16316 reg = gen_rtx_REG (reg_mode, 2);
16317 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16318 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16319 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16320
16321 insn = emit_move_insn (mem, reg);
f676971a 16322 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
16323 NULL_RTX, NULL_RTX);
16324 PATTERN (insn) = gen_blockage ();
16325 }
16326
83720594
RH
16327 for (i = 0; ; ++i)
16328 {
83720594
RH
16329 regno = EH_RETURN_DATA_REGNO (i);
16330 if (regno == INVALID_REGNUM)
16331 break;
16332
89e7058f
AH
16333 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
16334 info->ehrd_offset + sp_offset
16335 + reg_size * (int) i,
16336 info->total_size);
83720594
RH
16337 }
16338 }
16339
9ebbca7d 16340 /* Save CR if we use any that must be preserved. */
f57fe068 16341 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
16342 {
16343 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16344 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16345 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
16346 /* See the large comment above about why CR2_REGNO is used. */
16347 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 16348
9ebbca7d
GK
16349 /* If r12 was used to hold the original sp, copy cr into r0 now
16350 that it's free. */
16351 if (REGNO (frame_reg_rtx) == 12)
16352 {
f8a57be8
GK
16353 rtx set;
16354
9ebbca7d 16355 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
16356 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16357 RTX_FRAME_RELATED_P (insn) = 1;
16358 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
16359 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
16360 set,
16361 REG_NOTES (insn));
f676971a 16362
9ebbca7d
GK
16363 }
16364 insn = emit_move_insn (mem, cr_save_rtx);
16365
f676971a 16366 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 16367 NULL_RTX, NULL_RTX);
9ebbca7d
GK
16368 }
16369
f676971a 16370 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 16371 for which it was done previously. */
f57fe068 16372 if (!WORLD_SAVE_P (info) && info->push_p
e3b5732b 16373 && !(DEFAULT_ABI == ABI_V4 || crtl->calls_eh_return))
2b2c2fe5 16374 {
bcb2d701 16375 if (info->total_size < 32767)
2b2c2fe5 16376 sp_offset = info->total_size;
bcb2d701
EC
16377 else
16378 frame_reg_rtx = frame_ptr_rtx;
16379 rs6000_emit_allocate_stack (info->total_size,
16380 (frame_reg_rtx != sp_reg_rtx
16381 && ((info->altivec_size != 0)
16382 || (info->vrsave_mask != 0)
f78c3290
NF
16383 )),
16384 FALSE);
bcb2d701
EC
16385 if (frame_reg_rtx != sp_reg_rtx)
16386 rs6000_emit_stack_tie ();
2b2c2fe5 16387 }
9ebbca7d
GK
16388
16389 /* Set frame pointer, if needed. */
16390 if (frame_pointer_needed)
16391 {
7d5175e1 16392 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
16393 sp_reg_rtx);
16394 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 16395 }
9878760c 16396
2b2c2fe5
EC
16397 /* Save AltiVec registers if needed. Save here because the red zone does
16398 not include AltiVec registers. */
16399 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16400 {
16401 int i;
16402
 16403 /* There should be a non-inline version of this, for when we
16404 are saving lots of vector registers. */
16405 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16406 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16407 {
16408 rtx areg, savereg, mem;
16409 int offset;
16410
16411 offset = info->altivec_save_offset + sp_offset
16412 + 16 * (i - info->first_altivec_reg_save);
16413
16414 savereg = gen_rtx_REG (V4SImode, i);
16415
16416 areg = gen_rtx_REG (Pmode, 0);
16417 emit_move_insn (areg, GEN_INT (offset));
16418
16419 /* AltiVec addressing mode is [reg+reg]. */
16420 mem = gen_frame_mem (V4SImode,
16421 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
16422
16423 insn = emit_move_insn (mem, savereg);
16424
16425 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16426 areg, GEN_INT (offset));
16427 }
16428 }
16429
16430 /* VRSAVE is a bit vector representing which AltiVec registers
16431 are used. The OS uses this to determine which vector
16432 registers to save on a context switch. We need to save
16433 VRSAVE on the stack frame, add whatever AltiVec registers we
16434 used in this function, and do the corresponding magic in the
16435 epilogue. */
16436
16437 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16438 && info->vrsave_mask != 0)
16439 {
16440 rtx reg, mem, vrsave;
16441 int offset;
16442
16443 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
16444 as frame_reg_rtx and r11 as the static chain pointer for
16445 nested functions. */
16446 reg = gen_rtx_REG (SImode, 0);
16447 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
16448 if (TARGET_MACHO)
16449 emit_insn (gen_get_vrsave_internal (reg));
16450 else
16451 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
16452
16453 if (!WORLD_SAVE_P (info))
16454 {
16455 /* Save VRSAVE. */
16456 offset = info->vrsave_save_offset + sp_offset;
16457 mem = gen_frame_mem (SImode,
16458 gen_rtx_PLUS (Pmode, frame_reg_rtx,
16459 GEN_INT (offset)));
16460 insn = emit_move_insn (mem, reg);
16461 }
16462
16463 /* Include the registers in the mask. */
16464 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
16465
16466 insn = emit_insn (generate_set_vrsave (reg, info, 0));
16467 }
16468
1db02437 16469 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 16470 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
16471 || (DEFAULT_ABI == ABI_V4
16472 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 16473 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
16474 {
16475 /* If emit_load_toc_table will use the link register, we need to save
16476 it. We use R12 for this purpose because emit_load_toc_table
16477 can use register 0. This allows us to use a plain 'blr' to return
16478 from the procedure more often. */
16479 int save_LR_around_toc_setup = (TARGET_ELF
16480 && DEFAULT_ABI != ABI_AIX
16481 && flag_pic
16482 && ! info->lr_save_p
16483 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
16484 if (save_LR_around_toc_setup)
16485 {
1de43f85 16486 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 16487
c4ad648e 16488 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 16489 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 16490
c4ad648e 16491 rs6000_emit_load_toc_table (TRUE);
f8a57be8 16492
c4ad648e 16493 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
16494 RTX_FRAME_RELATED_P (insn) = 1;
16495 }
16496 else
16497 rs6000_emit_load_toc_table (TRUE);
16498 }
ee890fe2 16499
fcce224d 16500#if TARGET_MACHO
ee890fe2 16501 if (DEFAULT_ABI == ABI_DARWIN
e3b5732b 16502 && flag_pic && crtl->uses_pic_offset_table)
ee890fe2 16503 {
1de43f85 16504 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
11abc112 16505 rtx src = machopic_function_base_sym ();
ee890fe2 16506
6d0a8091
DJ
16507 /* Save and restore LR locally around this call (in R0). */
16508 if (!info->lr_save_p)
6fb5fa3c 16509 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 16510
6fb5fa3c 16511 emit_insn (gen_load_macho_picbase (src));
ee890fe2 16512
6fb5fa3c
DB
16513 emit_move_insn (gen_rtx_REG (Pmode,
16514 RS6000_PIC_OFFSET_TABLE_REGNUM),
16515 lr);
6d0a8091
DJ
16516
16517 if (!info->lr_save_p)
6fb5fa3c 16518 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 16519 }
fcce224d 16520#endif
9ebbca7d
GK
16521}
16522
9ebbca7d 16523/* Write function prologue. */
a4f6c312 16524
08c148a8 16525static void
f676971a 16526rs6000_output_function_prologue (FILE *file,
a2369ed3 16527 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
16528{
16529 rs6000_stack_t *info = rs6000_stack_info ();
16530
4697a36c
MM
16531 if (TARGET_DEBUG_STACK)
16532 debug_stack_info (info);
9878760c 16533
a4f6c312
SS
16534 /* Write .extern for any function we will call to save and restore
16535 fp values. */
16536 if (info->first_fp_reg_save < 64
16537 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 16538 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 16539 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
f78c3290 16540 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9878760c 16541
c764f757
RK
16542 /* Write .extern for AIX common mode routines, if needed. */
16543 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
16544 {
f6709c70
JW
16545 fputs ("\t.extern __mulh\n", file);
16546 fputs ("\t.extern __mull\n", file);
16547 fputs ("\t.extern __divss\n", file);
16548 fputs ("\t.extern __divus\n", file);
16549 fputs ("\t.extern __quoss\n", file);
16550 fputs ("\t.extern __quous\n", file);
c764f757
RK
16551 common_mode_defined = 1;
16552 }
9878760c 16553
9ebbca7d 16554 if (! HAVE_prologue)
979721f8 16555 {
9ebbca7d 16556 start_sequence ();
9dda4cc8 16557
a4f6c312
SS
16558 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
16559 the "toplevel" insn chain. */
2e040219 16560 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16561 rs6000_emit_prologue ();
2e040219 16562 emit_note (NOTE_INSN_DELETED);
178c3eff 16563
a3c9585f 16564 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16565 {
16566 rtx insn;
16567 unsigned addr = 0;
16568 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16569 {
16570 INSN_ADDRESSES_NEW (insn, addr);
16571 addr += 4;
16572 }
16573 }
9dda4cc8 16574
9ebbca7d 16575 if (TARGET_DEBUG_STACK)
a4f6c312 16576 debug_rtx_list (get_insns (), 100);
c9d691e9 16577 final (get_insns (), file, FALSE);
9ebbca7d 16578 end_sequence ();
979721f8
MM
16579 }
16580
9ebbca7d
GK
16581 rs6000_pic_labelno++;
16582}
f676971a 16583
1c9c5e43
AM
16584/* Non-zero if vmx regs are restored before the frame pop, zero if
16585 we restore after the pop when possible. */
16586#define ALWAYS_RESTORE_ALTIVEC_BEFORE_POP 0
16587
f78c3290
NF
16588/* Reload CR from REG. */
16589
16590static void
16591rs6000_restore_saved_cr (rtx reg, int using_mfcr_multiple)
16592{
16593 int count = 0;
16594 int i;
16595
16596 if (using_mfcr_multiple)
16597 {
16598 for (i = 0; i < 8; i++)
16599 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16600 count++;
16601 gcc_assert (count);
16602 }
16603
16604 if (using_mfcr_multiple && count > 1)
16605 {
16606 rtvec p;
16607 int ndx;
16608
16609 p = rtvec_alloc (count);
16610
16611 ndx = 0;
16612 for (i = 0; i < 8; i++)
16613 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16614 {
16615 rtvec r = rtvec_alloc (2);
16616 RTVEC_ELT (r, 0) = reg;
16617 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
16618 RTVEC_ELT (p, ndx) =
16619 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
16620 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
16621 ndx++;
16622 }
16623 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16624 gcc_assert (ndx == count);
16625 }
16626 else
16627 for (i = 0; i < 8; i++)
16628 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16629 {
16630 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
16631 CR0_REGNO+i),
16632 reg));
16633 }
16634}
16635
9ebbca7d 16636/* Emit function epilogue as insns.
9878760c 16637
9ebbca7d
GK
16638 At present, dwarf2out_frame_debug_expr doesn't understand
16639 register restores, so we don't bother setting RTX_FRAME_RELATED_P
16640 anywhere in the epilogue. Most of the insns below would in any case
16641 need special notes to explain where r11 is in relation to the stack. */
9878760c 16642
9ebbca7d 16643void
a2369ed3 16644rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
16645{
16646 rs6000_stack_t *info;
f78c3290 16647 int restoring_GPRs_inline;
9ebbca7d
GK
16648 int restoring_FPRs_inline;
16649 int using_load_multiple;
d296e02e 16650 int using_mtcr_multiple;
9ebbca7d 16651 int use_backchain_to_restore_sp;
f78c3290
NF
16652 int restore_lr;
16653 int strategy;
9ebbca7d
GK
16654 int sp_offset = 0;
16655 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
16656 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 16657 enum machine_mode reg_mode = Pmode;
327e5343 16658 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16659 int i;
16660
c19de7aa
AH
16661 info = rs6000_stack_info ();
16662
16663 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
16664 {
16665 reg_mode = V2SImode;
16666 reg_size = 8;
16667 }
16668
f78c3290
NF
16669 strategy = rs6000_savres_strategy (info, /*savep=*/false,
16670 /*static_chain_p=*/0, sibcall);
16671 using_load_multiple = strategy & SAVRES_MULTIPLE;
16672 restoring_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16673 restoring_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
d296e02e 16674 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
16675 || rs6000_cpu == PROCESSOR_PPC603
16676 || rs6000_cpu == PROCESSOR_PPC750
16677 || optimize_size);
1c9c5e43
AM
16678 /* Restore via the backchain when we have a large frame, since this
16679 is more efficient than an addis, addi pair. The second condition
16680 here will not trigger at the moment; we don't actually need a
16681 frame pointer for alloca, but the generic parts of the compiler
16682 give us one anyway. */
16683 use_backchain_to_restore_sp = (info->total_size > 32767
d2492102
AP
16684 || info->total_size
16685 + (info->lr_save_p ? info->lr_save_offset : 0)
16686 > 32767
1c9c5e43
AM
16687 || (cfun->calls_alloca
16688 && !frame_pointer_needed));
f78c3290
NF
16689 restore_lr = (info->lr_save_p
16690 && restoring_GPRs_inline
16691 && restoring_FPRs_inline);
9ebbca7d 16692
f57fe068 16693 if (WORLD_SAVE_P (info))
d62294f5
FJ
16694 {
16695 int i, j;
16696 char rname[30];
16697 const char *alloc_rname;
16698 rtvec p;
16699
16700 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
16701 stack slot (which is not likely to be our caller).
16702 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
16703 rest_world is similar, except any R10 parameter is ignored.
16704 The exception-handling stuff that was here in 2.95 is no
16705 longer necessary. */
d62294f5
FJ
16706
16707 p = rtvec_alloc (9
16708 + 1
f676971a 16709 + 32 - info->first_gp_reg_save
c4ad648e
AM
16710 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
16711 + 63 + 1 - info->first_fp_reg_save);
d62294f5 16712
e3b5732b 16713 strcpy (rname, ((crtl->calls_eh_return) ?
c4ad648e 16714 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
16715 alloc_rname = ggc_strdup (rname);
16716
16717 j = 0;
16718 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
16719 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 16720 gen_rtx_REG (Pmode,
1de43f85 16721 LR_REGNO));
d62294f5 16722 RTVEC_ELT (p, j++)
c4ad648e 16723 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 16724 /* The instruction pattern requires a clobber here;
c4ad648e 16725 it is shared with the restVEC helper. */
d62294f5 16726 RTVEC_ELT (p, j++)
c4ad648e 16727 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
16728
16729 {
c4ad648e
AM
16730 /* CR register traditionally saved as CR2. */
16731 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16732 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16733 GEN_INT (info->cr_save_offset));
0be76840 16734 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16735
16736 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
16737 }
16738
16739 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16740 {
16741 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16742 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16743 GEN_INT (info->gp_save_offset
16744 + reg_size * i));
0be76840 16745 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16746
16747 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16748 }
d62294f5 16749 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16750 {
16751 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16752 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16753 GEN_INT (info->altivec_save_offset
16754 + 16 * i));
0be76840 16755 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16756
16757 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16758 }
d62294f5 16759 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
16760 {
16761 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
16762 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16763 GEN_INT (info->fp_save_offset
16764 + 8 * i));
0be76840 16765 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
16766
16767 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16768 }
d62294f5 16769 RTVEC_ELT (p, j++)
c4ad648e 16770 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 16771 RTVEC_ELT (p, j++)
c4ad648e 16772 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 16773 RTVEC_ELT (p, j++)
c4ad648e 16774 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 16775 RTVEC_ELT (p, j++)
c4ad648e 16776 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 16777 RTVEC_ELT (p, j++)
c4ad648e 16778 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
16779 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
16780
16781 return;
16782 }
16783
45b194f8
AM
16784 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
16785 if (info->push_p)
2b2c2fe5 16786 sp_offset = info->total_size;
f676971a 16787
e6477eaa
AM
16788 /* Restore AltiVec registers if we must do so before adjusting the
16789 stack. */
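 /* NOTE (an assumption added for clarity): the -220/-288 cutoffs used
    below presumably correspond to the size of the ABI-protected area just
    under the stack pointer on 32-bit and 64-bit targets; vector saves that
    fall outside that area must be reloaded before the stack pointer is
    popped.  */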
16790 if (TARGET_ALTIVEC_ABI
16791 && info->altivec_size != 0
1c9c5e43
AM
16792 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
16793 || (DEFAULT_ABI != ABI_V4
16794 && info->altivec_save_offset < (TARGET_32BIT ? -220 : -288))))
9aa86737
AH
16795 {
16796 int i;
16797
e6477eaa
AM
16798 if (use_backchain_to_restore_sp)
16799 {
16800 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16801 emit_move_insn (frame_reg_rtx,
16802 gen_rtx_MEM (Pmode, sp_reg_rtx));
16803 sp_offset = 0;
16804 }
1c9c5e43
AM
16805 else if (frame_pointer_needed)
16806 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa 16807
9aa86737
AH
16808 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16809 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16810 {
16811 rtx addr, areg, mem;
16812
16813 areg = gen_rtx_REG (Pmode, 0);
16814 emit_move_insn
16815 (areg, GEN_INT (info->altivec_save_offset
16816 + sp_offset
16817 + 16 * (i - info->first_altivec_reg_save)));
16818
16819 /* AltiVec addressing mode is [reg+reg]. */
16820 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 16821 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
16822
16823 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
16824 }
16825 }
16826
e6477eaa
AM
16827 /* Restore VRSAVE if we must do so before adjusting the stack. */
16828 if (TARGET_ALTIVEC
16829 && TARGET_ALTIVEC_VRSAVE
16830 && info->vrsave_mask != 0
1c9c5e43
AM
16831 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
16832 || (DEFAULT_ABI != ABI_V4
16833 && info->vrsave_save_offset < (TARGET_32BIT ? -220 : -288))))
e6477eaa
AM
16834 {
16835 rtx addr, mem, reg;
16836
1c9c5e43 16837 if (frame_reg_rtx == sp_reg_rtx)
e6477eaa 16838 {
1c9c5e43
AM
16839 if (use_backchain_to_restore_sp)
16840 {
16841 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16842 emit_move_insn (frame_reg_rtx,
16843 gen_rtx_MEM (Pmode, sp_reg_rtx));
16844 sp_offset = 0;
16845 }
16846 else if (frame_pointer_needed)
16847 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa
AM
16848 }
16849
16850 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16851 GEN_INT (info->vrsave_save_offset + sp_offset));
16852 mem = gen_frame_mem (SImode, addr);
16853 reg = gen_rtx_REG (SImode, 12);
16854 emit_move_insn (reg, mem);
16855
16856 emit_insn (generate_set_vrsave (reg, info, 1));
16857 }
16858
1c9c5e43
AM
16859 /* If we have a large stack frame, restore the old stack pointer
16860 using the backchain. */
2b2c2fe5
EC
16861 if (use_backchain_to_restore_sp)
16862 {
1c9c5e43 16863 if (frame_reg_rtx == sp_reg_rtx)
e6477eaa
AM
16864 {
16865 /* Under V.4, don't reset the stack pointer until after we're done
16866 loading the saved registers. */
16867 if (DEFAULT_ABI == ABI_V4)
16868 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16869
16870 emit_move_insn (frame_reg_rtx,
16871 gen_rtx_MEM (Pmode, sp_reg_rtx));
16872 sp_offset = 0;
16873 }
1c9c5e43
AM
16874 else if (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
16875 && DEFAULT_ABI == ABI_V4)
16876 /* frame_reg_rtx has been set up by the altivec restore. */
16877 ;
16878 else
16879 {
16880 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
16881 frame_reg_rtx = sp_reg_rtx;
16882 }
16883 }
16884 /* If we have a frame pointer, we can restore the old stack pointer
16885 from it. */
16886 else if (frame_pointer_needed)
16887 {
16888 frame_reg_rtx = sp_reg_rtx;
16889 if (DEFAULT_ABI == ABI_V4)
16890 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16891
16892 emit_insn (TARGET_32BIT
16893 ? gen_addsi3 (frame_reg_rtx, hard_frame_pointer_rtx,
16894 GEN_INT (info->total_size))
16895 : gen_adddi3 (frame_reg_rtx, hard_frame_pointer_rtx,
16896 GEN_INT (info->total_size)));
16897 sp_offset = 0;
2b2c2fe5 16898 }
45b194f8
AM
16899 else if (info->push_p
16900 && DEFAULT_ABI != ABI_V4
e3b5732b 16901 && !crtl->calls_eh_return)
2b2c2fe5 16902 {
45b194f8
AM
16903 emit_insn (TARGET_32BIT
16904 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16905 GEN_INT (info->total_size))
16906 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16907 GEN_INT (info->total_size)));
16908 sp_offset = 0;
2b2c2fe5
EC
16909 }
16910
e6477eaa 16911 /* Restore AltiVec registers if we have not done so already. */
1c9c5e43
AM
16912 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
16913 && TARGET_ALTIVEC_ABI
e6477eaa
AM
16914 && info->altivec_size != 0
16915 && (DEFAULT_ABI == ABI_V4
16916 || info->altivec_save_offset >= (TARGET_32BIT ? -220 : -288)))
16917 {
16918 int i;
16919
16920 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16921 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16922 {
16923 rtx addr, areg, mem;
16924
16925 areg = gen_rtx_REG (Pmode, 0);
16926 emit_move_insn
16927 (areg, GEN_INT (info->altivec_save_offset
16928 + sp_offset
16929 + 16 * (i - info->first_altivec_reg_save)));
16930
16931 /* AltiVec addressing mode is [reg+reg]. */
16932 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
16933 mem = gen_frame_mem (V4SImode, addr);
16934
16935 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
16936 }
16937 }
16938
16939 /* Restore VRSAVE if we have not done so already. */
1c9c5e43
AM
16940 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
16941 && TARGET_ALTIVEC
e6477eaa
AM
16942 && TARGET_ALTIVEC_VRSAVE
16943 && info->vrsave_mask != 0
16944 && (DEFAULT_ABI == ABI_V4
16945 || info->vrsave_save_offset >= (TARGET_32BIT ? -220 : -288)))
554c2941
AM
16946 {
16947 rtx addr, mem, reg;
16948
16949 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16950 GEN_INT (info->vrsave_save_offset + sp_offset));
16951 mem = gen_frame_mem (SImode, addr);
16952 reg = gen_rtx_REG (SImode, 12);
16953 emit_move_insn (reg, mem);
16954
16955 emit_insn (generate_set_vrsave (reg, info, 1));
16956 }
16957
f78c3290
NF
16958 /* Get the old lr if we saved it. If we are restoring registers
16959 out-of-line, then the out-of-line routines can do this for us. */
16960 if (restore_lr)
b6c9286a 16961 {
a3170dc6
AH
16962 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
16963 info->lr_save_offset + sp_offset);
ba4828e0 16964
9ebbca7d 16965 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 16966 }
f676971a 16967
9ebbca7d
GK
16968 /* Get the old cr if we saved it. */
16969 if (info->cr_save_p)
16970 {
16971 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16972 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16973 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 16974
9ebbca7d
GK
16975 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
16976 }
f676971a 16977
9ebbca7d 16978 /* Set LR here to try to overlap restores below. */
f78c3290 16979 if (restore_lr)
1de43f85 16980 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 16981 gen_rtx_REG (Pmode, 0));
f676971a 16982
83720594 16983 /* Load exception handler data registers, if needed. */
e3b5732b 16984 if (crtl->calls_eh_return)
83720594 16985 {
78e1b90d
DE
16986 unsigned int i, regno;
16987
fc4767bb
JJ
16988 if (TARGET_AIX)
16989 {
16990 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16991 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16992 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16993
16994 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
16995 }
16996
83720594
RH
16997 for (i = 0; ; ++i)
16998 {
a3170dc6 16999 rtx mem;
83720594
RH
17000
17001 regno = EH_RETURN_DATA_REGNO (i);
17002 if (regno == INVALID_REGNUM)
17003 break;
17004
a3170dc6
AH
17005 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
17006 info->ehrd_offset + sp_offset
17007 + reg_size * (int) i);
83720594
RH
17008
17009 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
17010 }
17011 }
f676971a 17012
9ebbca7d
GK
17013 /* Restore GPRs. This is done as a PARALLEL if we are using
17014 the load-multiple instructions. */
f78c3290
NF
17015 if (TARGET_SPE_ABI
17016 && info->spe_64bit_regs_used != 0
17017 && info->first_gp_reg_save != 32)
52ff33d0 17018 {
52ff33d0
NF
17019 /* Determine whether we can address all of the registers that need
17020 to be saved with an offset from the stack pointer that fits in
17021 the small const field for SPE memory instructions. */
17022 int spe_regs_addressable_via_sp
f78c3290
NF
17023 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
17024 + (32 - info->first_gp_reg_save - 1) * reg_size)
17025 && restoring_GPRs_inline);
52ff33d0
NF
17026 int spe_offset;
17027
17028 if (spe_regs_addressable_via_sp)
45b194f8 17029 spe_offset = info->spe_gp_save_offset + sp_offset;
52ff33d0
NF
17030 else
17031 {
45b194f8 17032 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 17033 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 17034 not clobbering it when we were saving registers in the prologue.
52ff33d0
NF
17035 There's no need to worry here because the static chain is passed
17036 anew to every function. */
f78c3290
NF
17037 int ool_adjust = (restoring_GPRs_inline
17038 ? 0
17039 : (info->first_gp_reg_save
17040 - (FIRST_SAVRES_REGISTER+1))*8);
17041
45b194f8
AM
17042 if (frame_reg_rtx == sp_reg_rtx)
17043 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17044 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
f78c3290
NF
17045 GEN_INT (info->spe_gp_save_offset
17046 + sp_offset
17047 - ool_adjust)));
45b194f8
AM
17048 /* Keep the invariant that frame_reg_rtx + sp_offset points
17049 at the top of the stack frame. */
17050 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
17051
17052 spe_offset = 0;
17053 }
17054
f78c3290
NF
17055 if (restoring_GPRs_inline)
17056 {
17057 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17058 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17059 {
17060 rtx offset, addr, mem;
52ff33d0 17061
f78c3290
NF
17062 /* We're doing all this to ensure that the immediate offset
17063 fits into the immediate field of 'evldd'. */
17064 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
52ff33d0 17065
f78c3290
NF
17066 offset = GEN_INT (spe_offset + reg_size * i);
17067 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
17068 mem = gen_rtx_MEM (V2SImode, addr);
52ff33d0 17069
f78c3290
NF
17070 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
17071 mem);
17072 }
17073 }
17074 else
17075 {
17076 rtx par;
17077
17078 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
17079 0, reg_mode,
17080 /*savep=*/false, /*gpr=*/true,
17081 /*exitp=*/true);
17082 emit_jump_insn (par);
17083
17084 /* We don't want anybody else emitting things after we jumped
17085 back. */
17086 return;
17087 }
52ff33d0 17088 }
f78c3290
NF
17089 else if (!restoring_GPRs_inline)
17090 {
17091 /* We are jumping to an out-of-line function. */
17092 bool can_use_exit = info->first_fp_reg_save == 64;
17093 rtx par;
17094
17095 /* Emit stack reset code if we need it. */
17096 if (can_use_exit)
17097 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17098 sp_offset, can_use_exit);
17099 else
17100 emit_insn (gen_addsi3 (gen_rtx_REG (Pmode, 11),
17101 sp_reg_rtx,
17102 GEN_INT (sp_offset - info->fp_size)));
17103
17104 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
17105 info->gp_save_offset, reg_mode,
17106 /*savep=*/false, /*gpr=*/true,
17107 /*exitp=*/can_use_exit);
17108
17109 if (can_use_exit)
17110 {
17111 if (info->cr_save_p)
17112 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12),
17113 using_mtcr_multiple);
17114
17115 emit_jump_insn (par);
17116
17117 /* We don't want anybody else emitting things after we jumped
17118 back. */
17119 return;
17120 }
17121 else
17122 emit_insn (par);
17123 }
17124 else if (using_load_multiple)
17125 {
17126 rtvec p;
17127 p = rtvec_alloc (32 - info->first_gp_reg_save);
17128 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9ebbca7d 17129 {
f676971a
EC
17130 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17131 GEN_INT (info->gp_save_offset
17132 + sp_offset
9ebbca7d 17133 + reg_size * i));
0be76840 17134 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 17135
f78c3290
NF
17136 RTVEC_ELT (p, i) =
17137 gen_rtx_SET (VOIDmode,
17138 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
17139 mem);
9ebbca7d 17140 }
f78c3290
NF
17141 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
17142 }
17143 else
17144 {
17145 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17146 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17147 {
17148 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17149 GEN_INT (info->gp_save_offset
17150 + sp_offset
17151 + reg_size * i));
17152 rtx mem = gen_frame_mem (reg_mode, addr);
17153
17154 emit_move_insn (gen_rtx_REG (reg_mode,
17155 info->first_gp_reg_save + i), mem);
17156 }
17157 }
9878760c 17158
9ebbca7d
GK
17159 /* Restore fpr's if we need to do it without calling a function. */
17160 if (restoring_FPRs_inline)
17161 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 17162 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
17163 && ! call_used_regs[info->first_fp_reg_save+i]))
17164 {
17165 rtx addr, mem;
17166 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
17167 GEN_INT (info->fp_save_offset
17168 + sp_offset
a4f6c312 17169 + 8 * i));
0be76840 17170 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 17171
f676971a 17172 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
17173 info->first_fp_reg_save + i),
17174 mem);
17175 }
8d30c4ee 17176
9ebbca7d
GK
17177 /* If we saved cr, restore it here. Just those that were used. */
17178 if (info->cr_save_p)
f78c3290 17179 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12), using_mtcr_multiple);
979721f8 17180
9ebbca7d 17181 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6 17182 have been done. */
f78c3290
NF
17183 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17184 sp_offset, !restoring_FPRs_inline);
b6c9286a 17185
e3b5732b 17186 if (crtl->calls_eh_return)
83720594
RH
17187 {
17188 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 17189 emit_insn (TARGET_32BIT
83720594
RH
17190 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
17191 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
17192 }
17193
9ebbca7d
GK
17194 if (!sibcall)
17195 {
17196 rtvec p;
17197 if (! restoring_FPRs_inline)
f78c3290 17198 p = rtvec_alloc (4 + 64 - info->first_fp_reg_save);
9ebbca7d
GK
17199 else
17200 p = rtvec_alloc (2);
b6c9286a 17201
e35b9579 17202 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f78c3290
NF
17203 RTVEC_ELT (p, 1) = (restoring_FPRs_inline
17204 ? gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 65))
17205 : gen_rtx_CLOBBER (VOIDmode,
17206 gen_rtx_REG (Pmode, 65)));
9ebbca7d
GK
17207
17208 /* If we have to restore more than two FP registers, branch to the
17209 restore function. It will return to our caller. */
17210 if (! restoring_FPRs_inline)
17211 {
17212 int i;
f78c3290
NF
17213 rtx sym;
17214
17215 sym = rs6000_savres_routine_sym (info,
17216 /*savep=*/false,
17217 /*gpr=*/false,
17218 /*exitp=*/true);
17219 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode, sym);
17220 RTVEC_ELT (p, 3) = gen_rtx_USE (VOIDmode,
17221 gen_rtx_REG (Pmode, 11));
9ebbca7d
GK
17222 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
17223 {
17224 rtx addr, mem;
17225 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
17226 GEN_INT (info->fp_save_offset + 8*i));
0be76840 17227 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 17228
f78c3290 17229 RTVEC_ELT (p, i+4) =
9ebbca7d
GK
17230 gen_rtx_SET (VOIDmode,
17231 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
17232 mem);
b6c9286a
MM
17233 }
17234 }
f676971a 17235
9ebbca7d 17236 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 17237 }
9878760c
RK
17238}
17239
17240/* Write function epilogue. */
17241
08c148a8 17242static void
f676971a 17243rs6000_output_function_epilogue (FILE *file,
a2369ed3 17244 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 17245{
9ebbca7d 17246 if (! HAVE_epilogue)
9878760c 17247 {
9ebbca7d
GK
17248 rtx insn = get_last_insn ();
17249 /* If the last insn was a BARRIER, we don't have to write anything except
17250 the trace table. */
17251 if (GET_CODE (insn) == NOTE)
17252 insn = prev_nonnote_insn (insn);
17253 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 17254 {
9ebbca7d
GK
17255 /* This is slightly ugly, but at least we don't have two
17256 copies of the epilogue-emitting code. */
17257 start_sequence ();
17258
17259 /* A NOTE_INSN_DELETED is supposed to be at the start
17260 and end of the "toplevel" insn chain. */
2e040219 17261 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17262 rs6000_emit_epilogue (FALSE);
2e040219 17263 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17264
a3c9585f 17265 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
17266 {
17267 rtx insn;
17268 unsigned addr = 0;
17269 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
17270 {
17271 INSN_ADDRESSES_NEW (insn, addr);
17272 addr += 4;
17273 }
17274 }
17275
9ebbca7d 17276 if (TARGET_DEBUG_STACK)
a4f6c312 17277 debug_rtx_list (get_insns (), 100);
c9d691e9 17278 final (get_insns (), file, FALSE);
9ebbca7d 17279 end_sequence ();
4697a36c 17280 }
9878760c 17281 }
b4ac57ab 17282
efdba735
SH
17283#if TARGET_MACHO
17284 macho_branch_islands ();
0e5da0be
GK
17285 /* Mach-O doesn't support labels at the end of objects, so if
17286 it looks like we might want one, insert a NOP. */
17287 {
17288 rtx insn = get_last_insn ();
17289 while (insn
17290 && NOTE_P (insn)
a38e7aa5 17291 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 17292 insn = PREV_INSN (insn);
f676971a
EC
17293 if (insn
17294 && (LABEL_P (insn)
0e5da0be 17295 || (NOTE_P (insn)
a38e7aa5 17296 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
17297 fputs ("\tnop\n", file);
17298 }
17299#endif
17300
9b30bae2 17301 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
17302 on its format.
17303
17304 We don't output a traceback table if -finhibit-size-directive was
17305 used. The documentation for -finhibit-size-directive reads
17306 ``don't output a @code{.size} assembler directive, or anything
17307 else that would cause trouble if the function is split in the
17308 middle, and the two halves are placed at locations far apart in
17309 memory.'' The traceback table has this property, since it
17310 includes the offset from the start of the function to the
4d30c363
MM
17311 traceback table itself.
17312
17313 System V.4 PowerPCs (and the embedded ABI derived from it) use a
b6c9286a 17314 different traceback table. */
57ac7be9 17315 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
e3b5732b 17316 && rs6000_traceback != traceback_none && !crtl->is_thunk)
9b30bae2 17317 {
69c75916 17318 const char *fname = NULL;
3ac88239 17319 const char *language_string = lang_hooks.name;
6041bf2f 17320 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 17321 int i;
57ac7be9 17322 int optional_tbtab;
8097c268 17323 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
17324
17325 if (rs6000_traceback == traceback_full)
17326 optional_tbtab = 1;
17327 else if (rs6000_traceback == traceback_part)
17328 optional_tbtab = 0;
17329 else
17330 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 17331
69c75916
AM
17332 if (optional_tbtab)
17333 {
17334 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
17335 while (*fname == '.') /* V.4 encodes . in the name */
17336 fname++;
17337
17338 /* Need label immediately before tbtab, so we can compute
17339 its offset from the function start. */
17340 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
17341 ASM_OUTPUT_LABEL (file, fname);
17342 }
314fc5a9
ILT
17343
17344 /* The .tbtab pseudo-op can only be used for the first eight
17345 expressions, since it can't handle the possibly variable
17346 length fields that follow. However, if you omit the optional
17347 fields, the assembler outputs zeros for all optional fields
17348 anyways, giving each variable length field is minimum length
17349 (as defined in sys/debug.h). Thus we can not use the .tbtab
17350 pseudo-op at all. */
17351
17352 /* An all-zero word flags the start of the tbtab, for debuggers
17353 that have to find it by searching forward from the entry
17354 point or from the current pc. */
19d2d16f 17355 fputs ("\t.long 0\n", file);
314fc5a9
ILT
17356
17357 /* Tbtab format type. Use format type 0. */
19d2d16f 17358 fputs ("\t.byte 0,", file);
314fc5a9 17359
5fc921c1
DE
17360 /* Language type. Unfortunately, there does not seem to be any
17361 official way to discover the language being compiled, so we
17362 use language_string.
17363 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
17364 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
17365 a number, so for now use 9. */
5fc921c1 17366 if (! strcmp (language_string, "GNU C"))
314fc5a9 17367 i = 0;
6de9cd9a 17368 else if (! strcmp (language_string, "GNU F77")
7f62878c 17369 || ! strcmp (language_string, "GNU Fortran"))
314fc5a9 17370 i = 1;
8b83775b 17371 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 17372 i = 2;
5fc921c1
DE
17373 else if (! strcmp (language_string, "GNU Ada"))
17374 i = 3;
56438901
AM
17375 else if (! strcmp (language_string, "GNU C++")
17376 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 17377 i = 9;
9517ead8
AG
17378 else if (! strcmp (language_string, "GNU Java"))
17379 i = 13;
5fc921c1
DE
17380 else if (! strcmp (language_string, "GNU Objective-C"))
17381 i = 14;
314fc5a9 17382 else
37409796 17383 gcc_unreachable ();
314fc5a9
ILT
17384 fprintf (file, "%d,", i);
17385
17386 /* 8 single bit fields: global linkage (not set for C extern linkage,
17387 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
17388 from start of procedure stored in tbtab, internal function, function
17389 has controlled storage, function has no toc, function uses fp,
17390 function logs/aborts fp operations. */
17391 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
17392 fprintf (file, "%d,",
17393 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
17394
17395 /* 6 bitfields: function is interrupt handler, name present in
17396 proc table, function calls alloca, on condition directives
17397 (controls stack walks, 3 bits), saves condition reg, saves
17398 link reg. */
17399 /* The `function calls alloca' bit seems to be set whenever reg 31 is
17400 set up as a frame pointer, even when there is no alloca call. */
17401 fprintf (file, "%d,",
6041bf2f
DE
17402 ((optional_tbtab << 6)
17403 | ((optional_tbtab & frame_pointer_needed) << 5)
17404 | (info->cr_save_p << 1)
17405 | (info->lr_save_p)));
314fc5a9 17406
6041bf2f 17407 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
17408 (6 bits). */
17409 fprintf (file, "%d,",
4697a36c 17410 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
17411
17412 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
17413 fprintf (file, "%d,", (32 - first_reg_to_save ()));
17414
6041bf2f
DE
17415 if (optional_tbtab)
17416 {
17417 /* Compute the parameter info from the function decl argument
17418 list. */
17419 tree decl;
17420 int next_parm_info_bit = 31;
314fc5a9 17421
6041bf2f
DE
17422 for (decl = DECL_ARGUMENTS (current_function_decl);
17423 decl; decl = TREE_CHAIN (decl))
17424 {
17425 rtx parameter = DECL_INCOMING_RTL (decl);
17426 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 17427
6041bf2f
DE
17428 if (GET_CODE (parameter) == REG)
17429 {
ebb109ad 17430 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
17431 {
17432 int bits;
17433
17434 float_parms++;
17435
37409796
NS
17436 switch (mode)
17437 {
17438 case SFmode:
e41b2a33 17439 case SDmode:
37409796
NS
17440 bits = 0x2;
17441 break;
17442
17443 case DFmode:
7393f7f8 17444 case DDmode:
37409796 17445 case TFmode:
7393f7f8 17446 case TDmode:
37409796
NS
17447 bits = 0x3;
17448 break;
17449
17450 default:
17451 gcc_unreachable ();
17452 }
6041bf2f
DE
17453
17454 /* If only one bit will fit, don't or in this entry. */
17455 if (next_parm_info_bit > 0)
17456 parm_info |= (bits << (next_parm_info_bit - 1));
17457 next_parm_info_bit -= 2;
17458 }
17459 else
17460 {
17461 fixed_parms += ((GET_MODE_SIZE (mode)
17462 + (UNITS_PER_WORD - 1))
17463 / UNITS_PER_WORD);
17464 next_parm_info_bit -= 1;
17465 }
17466 }
17467 }
17468 }
314fc5a9
ILT
17469
17470 /* Number of fixed point parameters. */
17471 /* This is actually the number of words of fixed point parameters; thus
17472 an 8 byte struct counts as 2; and thus the maximum value is 8. */
17473 fprintf (file, "%d,", fixed_parms);
17474
17475 /* 2 bitfields: number of floating point parameters (7 bits), parameters
17476 all on stack. */
17477 /* This is actually the number of fp registers that hold parameters;
17478 and thus the maximum value is 13. */
17479 /* Set parameters on stack bit if parameters are not in their original
17480 registers, regardless of whether they are on the stack? Xlc
17481 seems to set the bit when not optimizing. */
17482 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
17483
6041bf2f
DE
17484 if (! optional_tbtab)
17485 return;
17486
314fc5a9
ILT
17487 /* Optional fields follow. Some are variable length. */
17488
17489 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
17490 11 double float. */
17491 /* There is an entry for each parameter in a register, in the order that
17492 they occur in the parameter list. Any intervening arguments on the
17493 stack are ignored. If the list overflows a long (max possible length
17494 34 bits) then completely leave off all elements that don't fit. */
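 /* Illustrative example (not in the original source): register parameters
    (double, int, float) would be encoded from bit 31 downwards as
    11, 0, 10, giving parm_info == 0xd0000000.  */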
17495 /* Only emit this long if there was at least one parameter. */
17496 if (fixed_parms || float_parms)
17497 fprintf (file, "\t.long %d\n", parm_info);
17498
17499 /* Offset from start of code to tb table. */
19d2d16f 17500 fputs ("\t.long ", file);
314fc5a9 17501 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
17502 if (TARGET_AIX)
17503 RS6000_OUTPUT_BASENAME (file, fname);
17504 else
17505 assemble_name (file, fname);
17506 putc ('-', file);
17507 rs6000_output_function_entry (file, fname);
19d2d16f 17508 putc ('\n', file);
314fc5a9
ILT
17509
17510 /* Interrupt handler mask. */
17511 /* Omit this long, since we never set the interrupt handler bit
17512 above. */
17513
17514 /* Number of CTL (controlled storage) anchors. */
17515 /* Omit this long, since the has_ctl bit is never set above. */
17516
17517 /* Displacement into stack of each CTL anchor. */
17518 /* Omit this list of longs, because there are no CTL anchors. */
17519
17520 /* Length of function name. */
69c75916
AM
17521 if (*fname == '*')
17522 ++fname;
296b8152 17523 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
17524
17525 /* Function name. */
17526 assemble_string (fname, strlen (fname));
17527
17528 /* Register for alloca automatic storage; this is always reg 31.
17529 Only emit this if the alloca bit was set above. */
17530 if (frame_pointer_needed)
19d2d16f 17531 fputs ("\t.byte 31\n", file);
b1765bde
DE
17532
17533 fputs ("\t.align 2\n", file);
9b30bae2 17534 }
9878760c 17535}
17167fd8 17536\f
a4f6c312
SS
17537/* A C compound statement that outputs the assembler code for a thunk
17538 function, used to implement C++ virtual function calls with
17539 multiple inheritance. The thunk acts as a wrapper around a virtual
17540 function, adjusting the implicit object parameter before handing
17541 control off to the real function.
17542
17543 First, emit code to add the integer DELTA to the location that
17544 contains the incoming first argument. Assume that this argument
17545 contains a pointer, and is the one used to pass the `this' pointer
17546 in C++. This is the incoming argument *before* the function
17547 prologue, e.g. `%o0' on a sparc. The addition must preserve the
17548 values of all other incoming arguments.
17167fd8
MM
17549
17550 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
17551 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
17552 not touch the return address. Hence returning from FUNCTION will
17553 return to whoever called the current `thunk'.
17167fd8 17554
a4f6c312
SS
17555 The effect must be as if FUNCTION had been called directly with the
17556 adjusted first argument. This macro is responsible for emitting
17557 all of the code for a thunk function; output_function_prologue()
17558 and output_function_epilogue() are not invoked.
17167fd8 17559
a4f6c312
SS
17560 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
17561 been extracted from it.) It might possibly be useful on some
17562 targets, but probably not.
17167fd8 17563
a4f6c312
SS
17564 If you do not define this macro, the target-independent code in the
17565 C++ frontend will generate a less efficient heavyweight thunk that
17566 calls FUNCTION instead of jumping to it. The generic approach does
17567 not support varargs. */
17167fd8 17568
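/* Illustrative sketch only (not part of the original source): roughly the
   pointer adjustment that the emitted thunk performs, written as plain C.
   The name thunk_adjust_this_sketch and the use of `long' for Pmode-sized
   loads are assumptions made for illustration; the real thunk does this in
   registers and then tail-calls FUNCTION without setting up a frame.  */
#if 0
static void *
thunk_adjust_this_sketch (void *this_ptr, long delta, long vcall_offset)
{
  /* Apply the constant offset.  */
  this_ptr = (char *) this_ptr + delta;

  /* Apply the offset loaded from the vtable, if required.  */
  if (vcall_offset)
    {
      char *vtable = *(char **) this_ptr;
      this_ptr = (char *) this_ptr + *(long *) (vtable + vcall_offset);
    }

  return this_ptr;
}
#endif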
3961e8fe 17569static void
f676971a
EC
17570rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
17571 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 17572 tree function)
17167fd8 17573{
5b71a4e7 17574 rtx this, insn, funexp;
17167fd8 17575
5b71a4e7 17576 reload_completed = 1;
fe3ad572 17577 epilogue_completed = 1;
56a7189a 17578
5b71a4e7 17579 /* Mark the end of the (empty) prologue. */
2e040219 17580 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 17581
5b71a4e7
DE
17582 /* Find the "this" pointer. If the function returns a structure,
17583 the structure return pointer is in r3. */
61f71b34 17584 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 17585 this = gen_rtx_REG (Pmode, 4);
56a7189a 17586 else
5b71a4e7 17587 this = gen_rtx_REG (Pmode, 3);
17167fd8 17588
5b71a4e7
DE
17589 /* Apply the constant offset, if required. */
17590 if (delta)
17591 {
17592 rtx delta_rtx = GEN_INT (delta);
17593 emit_insn (TARGET_32BIT
17594 ? gen_addsi3 (this, this, delta_rtx)
17595 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
17596 }
17597
5b71a4e7
DE
17598 /* Apply the offset from the vtable, if required. */
17599 if (vcall_offset)
17167fd8 17600 {
5b71a4e7
DE
17601 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
17602 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 17603
5b71a4e7 17604 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
17605 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
17606 {
17607 emit_insn (TARGET_32BIT
17608 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
17609 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
17610 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
17611 }
17612 else
17613 {
17614 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
17615
17616 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
17617 }
5b71a4e7
DE
17618 emit_insn (TARGET_32BIT
17619 ? gen_addsi3 (this, this, tmp)
17620 : gen_adddi3 (this, this, tmp));
17167fd8
MM
17621 }
17622
5b71a4e7
DE
17623 /* Generate a tail call to the target function. */
17624 if (!TREE_USED (function))
17625 {
17626 assemble_external (function);
17627 TREE_USED (function) = 1;
17628 }
17629 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 17630 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
17631
17632#if TARGET_MACHO
ab82a49f 17633 if (MACHOPIC_INDIRECT)
5b71a4e7 17634 funexp = machopic_indirect_call_target (funexp);
ee890fe2 17635#endif
5b71a4e7
DE
17636
17637 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 17638 generate sibcall RTL explicitly. */
5b71a4e7
DE
17639 insn = emit_call_insn (
17640 gen_rtx_PARALLEL (VOIDmode,
17641 gen_rtvec (4,
17642 gen_rtx_CALL (VOIDmode,
17643 funexp, const0_rtx),
17644 gen_rtx_USE (VOIDmode, const0_rtx),
17645 gen_rtx_USE (VOIDmode,
17646 gen_rtx_REG (SImode,
1de43f85 17647 LR_REGNO)),
5b71a4e7
DE
17648 gen_rtx_RETURN (VOIDmode))));
17649 SIBLING_CALL_P (insn) = 1;
17650 emit_barrier ();
17651
17652 /* Run just enough of rest_of_compilation to get the insns emitted.
17653 There's not really enough bulk here to make other passes such as
17654 instruction scheduling worthwhile. Note that use_thunk calls
17655 assemble_start_function and assemble_end_function. */
17656 insn = get_insns ();
55e092c4 17657 insn_locators_alloc ();
5b71a4e7
DE
17658 shorten_branches (insn);
17659 final_start_function (insn, file, 1);
c9d691e9 17660 final (insn, file, 1);
5b71a4e7 17661 final_end_function ();
d7087dd2 17662 free_after_compilation (cfun);
5b71a4e7
DE
17663
17664 reload_completed = 0;
fe3ad572 17665 epilogue_completed = 0;
9ebbca7d 17666}
9ebbca7d
GK
17667\f
17668/* A quick summary of the various types of 'constant-pool tables'
17669 under PowerPC:
17670
f676971a 17671 Target Flags Name One table per
9ebbca7d
GK
17672 AIX (none) AIX TOC object file
17673 AIX -mfull-toc AIX TOC object file
17674 AIX -mminimal-toc AIX minimal TOC translation unit
17675 SVR4/EABI (none) SVR4 SDATA object file
17676 SVR4/EABI -fpic SVR4 pic object file
17677 SVR4/EABI -fPIC SVR4 PIC translation unit
17678 SVR4/EABI -mrelocatable EABI TOC function
17679 SVR4/EABI -maix AIX TOC object file
f676971a 17680 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
17681 AIX minimal TOC translation unit
17682
17683 Name Reg. Set by entries contains:
17684 made by addrs? fp? sum?
17685
17686 AIX TOC 2 crt0 as Y option option
17687 AIX minimal TOC 30 prolog gcc Y Y option
17688 SVR4 SDATA 13 crt0 gcc N Y N
17689 SVR4 pic 30 prolog ld Y not yet N
17690 SVR4 PIC 30 prolog gcc Y option option
17691 EABI TOC 30 prolog gcc Y option option
17692
17693*/
17694
9ebbca7d
GK
17695/* Hash functions for the hash table. */
17696
17697static unsigned
a2369ed3 17698rs6000_hash_constant (rtx k)
9ebbca7d 17699{
46b33600
RH
17700 enum rtx_code code = GET_CODE (k);
17701 enum machine_mode mode = GET_MODE (k);
17702 unsigned result = (code << 3) ^ mode;
17703 const char *format;
17704 int flen, fidx;
f676971a 17705
46b33600
RH
17706 format = GET_RTX_FORMAT (code);
17707 flen = strlen (format);
17708 fidx = 0;
9ebbca7d 17709
46b33600
RH
17710 switch (code)
17711 {
17712 case LABEL_REF:
17713 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
17714
17715 case CONST_DOUBLE:
17716 if (mode != VOIDmode)
17717 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
17718 flen = 2;
17719 break;
17720
17721 case CODE_LABEL:
17722 fidx = 3;
17723 break;
17724
17725 default:
17726 break;
17727 }
9ebbca7d
GK
17728
17729 for (; fidx < flen; fidx++)
17730 switch (format[fidx])
17731 {
17732 case 's':
17733 {
17734 unsigned i, len;
17735 const char *str = XSTR (k, fidx);
17736 len = strlen (str);
17737 result = result * 613 + len;
17738 for (i = 0; i < len; i++)
17739 result = result * 613 + (unsigned) str[i];
17167fd8
MM
17740 break;
17741 }
9ebbca7d
GK
17742 case 'u':
17743 case 'e':
17744 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
17745 break;
17746 case 'i':
17747 case 'n':
17748 result = result * 613 + (unsigned) XINT (k, fidx);
17749 break;
17750 case 'w':
17751 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
17752 result = result * 613 + (unsigned) XWINT (k, fidx);
17753 else
17754 {
17755 size_t i;
9390387d 17756 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
17757 result = result * 613 + (unsigned) (XWINT (k, fidx)
17758 >> CHAR_BIT * i);
17759 }
17760 break;
09501938
DE
17761 case '0':
17762 break;
9ebbca7d 17763 default:
37409796 17764 gcc_unreachable ();
9ebbca7d 17765 }
46b33600 17766
9ebbca7d
GK
17767 return result;
17768}
17769
17770static unsigned
a2369ed3 17771toc_hash_function (const void *hash_entry)
9ebbca7d 17772{
f676971a 17773 const struct toc_hash_struct *thc =
a9098fd0
GK
17774 (const struct toc_hash_struct *) hash_entry;
17775 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
17776}
17777
17778/* Compare H1 and H2 for equivalence. */
17779
17780static int
a2369ed3 17781toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
17782{
17783 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
17784 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
17785
a9098fd0
GK
17786 if (((const struct toc_hash_struct *) h1)->key_mode
17787 != ((const struct toc_hash_struct *) h2)->key_mode)
17788 return 0;
17789
5692c7bc 17790 return rtx_equal_p (r1, r2);
9ebbca7d
GK
17791}
17792
28e510bd
MM
17793/* These are the names given by the C++ front-end to vtables, and
17794 vtable-like objects. Ideally, this logic should not be here;
17795 instead, there should be some programmatic way of inquiring as
17796 to whether or not an object is a vtable. */
17797
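/* ("_vt." is the vtable prefix used by the old, pre-3.0 g++ ABI; "_ZTV",
   "_ZTT", "_ZTI" and "_ZTC" are the Itanium C++ ABI manglings for vtables,
   VTTs, typeinfo objects and construction vtables respectively.)  */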
17798#define VTABLE_NAME_P(NAME) \
9390387d 17799 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
17800 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
17801 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 17802 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 17803 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
28e510bd
MM
17804
17805void
a2369ed3 17806rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
17807{
17808 /* Currently C++ toc references to vtables can be emitted before it
17809 is decided whether the vtable is public or private. If this is
17810 the case, then the linker will eventually complain that there is
f676971a 17811 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
17812 we emit the TOC reference to reference the symbol and not the
17813 section. */
17814 const char *name = XSTR (x, 0);
54ee9799 17815
f676971a 17816 if (VTABLE_NAME_P (name))
54ee9799
DE
17817 {
17818 RS6000_OUTPUT_BASENAME (file, name);
17819 }
17820 else
17821 assemble_name (file, name);
28e510bd
MM
17822}
17823
a4f6c312
SS
17824/* Output a TOC entry. We derive the entry name from what is being
17825 written. */
9878760c
RK
17826
17827void
a2369ed3 17828output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
17829{
17830 char buf[256];
3cce094d 17831 const char *name = buf;
ec940faa 17832 const char *real_name;
9878760c 17833 rtx base = x;
16fdeb48 17834 HOST_WIDE_INT offset = 0;
9878760c 17835
37409796 17836 gcc_assert (!TARGET_NO_TOC);
4697a36c 17837
9ebbca7d
GK
17838 /* When the linker won't eliminate them, don't output duplicate
17839 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
17840 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
17841 CODE_LABELs. */
17842 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
17843 {
17844 struct toc_hash_struct *h;
17845 void * * found;
f676971a 17846
17211ab5 17847 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 17848 time because GGC is not initialized at that point. */
17211ab5 17849 if (toc_hash_table == NULL)
f676971a 17850 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
17851 toc_hash_eq, NULL);
17852
5ead67f6 17853 h = GGC_NEW (struct toc_hash_struct);
9ebbca7d 17854 h->key = x;
a9098fd0 17855 h->key_mode = mode;
9ebbca7d 17856 h->labelno = labelno;
f676971a 17857
9ebbca7d
GK
17858 found = htab_find_slot (toc_hash_table, h, 1);
17859 if (*found == NULL)
17860 *found = h;
f676971a 17861 else /* This is indeed a duplicate.
9ebbca7d
GK
17862 Set this label equal to that label. */
17863 {
17864 fputs ("\t.set ", file);
17865 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
17866 fprintf (file, "%d,", labelno);
17867 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 17868 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
17869 found)->labelno));
17870 return;
17871 }
17872 }
17873
17874 /* If we're going to put a double constant in the TOC, make sure it's
17875 aligned properly when strict alignment is on. */
ff1720ed
RK
17876 if (GET_CODE (x) == CONST_DOUBLE
17877 && STRICT_ALIGNMENT
a9098fd0 17878 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
17879 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
17880 ASM_OUTPUT_ALIGN (file, 3);
17881 }
17882
4977bab6 17883 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 17884
37c37a57
RK
17885 /* Handle FP constants specially. Note that if we have a minimal
17886 TOC, things we put here aren't actually in the TOC, so we can allow
17887 FP constants. */
00b79d54
BE
17888 if (GET_CODE (x) == CONST_DOUBLE &&
17889 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
17890 {
17891 REAL_VALUE_TYPE rv;
17892 long k[4];
17893
17894 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17895 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17896 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
17897 else
17898 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
17899
17900 if (TARGET_64BIT)
17901 {
17902 if (TARGET_MINIMAL_TOC)
17903 fputs (DOUBLE_INT_ASM_OP, file);
17904 else
17905 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17906 k[0] & 0xffffffff, k[1] & 0xffffffff,
17907 k[2] & 0xffffffff, k[3] & 0xffffffff);
17908 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
17909 k[0] & 0xffffffff, k[1] & 0xffffffff,
17910 k[2] & 0xffffffff, k[3] & 0xffffffff);
17911 return;
17912 }
17913 else
17914 {
17915 if (TARGET_MINIMAL_TOC)
17916 fputs ("\t.long ", file);
17917 else
17918 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17919 k[0] & 0xffffffff, k[1] & 0xffffffff,
17920 k[2] & 0xffffffff, k[3] & 0xffffffff);
17921 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
17922 k[0] & 0xffffffff, k[1] & 0xffffffff,
17923 k[2] & 0xffffffff, k[3] & 0xffffffff);
17924 return;
17925 }
17926 }
00b79d54
BE
17927 else if (GET_CODE (x) == CONST_DOUBLE &&
17928 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 17929 {
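      /* Illustrative example (an assumption, not verified output): on a
         64-bit target without -mminimal-toc, the DFmode constant 1.0 would
         be emitted by the format strings below roughly as
           .tc FD_3ff00000_0[TC],0x3ff0000000000000  */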
042259f2
DE
17930 REAL_VALUE_TYPE rv;
17931 long k[2];
0adc764e 17932
042259f2 17933 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17934
17935 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17936 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
17937 else
17938 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 17939
13ded975
DE
17940 if (TARGET_64BIT)
17941 {
17942 if (TARGET_MINIMAL_TOC)
2bfcf297 17943 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17944 else
2f0552b6
AM
17945 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17946 k[0] & 0xffffffff, k[1] & 0xffffffff);
17947 fprintf (file, "0x%lx%08lx\n",
17948 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17949 return;
17950 }
1875cc88 17951 else
13ded975
DE
17952 {
17953 if (TARGET_MINIMAL_TOC)
2bfcf297 17954 fputs ("\t.long ", file);
13ded975 17955 else
2f0552b6
AM
17956 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17957 k[0] & 0xffffffff, k[1] & 0xffffffff);
17958 fprintf (file, "0x%lx,0x%lx\n",
17959 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17960 return;
17961 }
9878760c 17962 }
00b79d54
BE
17963 else if (GET_CODE (x) == CONST_DOUBLE &&
17964 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 17965 {
042259f2
DE
17966 REAL_VALUE_TYPE rv;
17967 long l;
9878760c 17968
042259f2 17969 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17970 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17971 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
17972 else
17973 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 17974
31bfaa0b
DE
17975 if (TARGET_64BIT)
17976 {
17977 if (TARGET_MINIMAL_TOC)
2bfcf297 17978 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 17979 else
2f0552b6
AM
17980 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17981 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
17982 return;
17983 }
042259f2 17984 else
31bfaa0b
DE
17985 {
17986 if (TARGET_MINIMAL_TOC)
2bfcf297 17987 fputs ("\t.long ", file);
31bfaa0b 17988 else
2f0552b6
AM
17989 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17990 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
17991 return;
17992 }
042259f2 17993 }
f176e826 17994 else if (GET_MODE (x) == VOIDmode
a9098fd0 17995 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 17996 {
e2c953b6 17997 unsigned HOST_WIDE_INT low;
042259f2
DE
17998 HOST_WIDE_INT high;
17999
18000 if (GET_CODE (x) == CONST_DOUBLE)
18001 {
18002 low = CONST_DOUBLE_LOW (x);
18003 high = CONST_DOUBLE_HIGH (x);
18004 }
18005 else
18006#if HOST_BITS_PER_WIDE_INT == 32
18007 {
18008 low = INTVAL (x);
0858c623 18009 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
18010 }
18011#else
18012 {
c4ad648e
AM
18013 low = INTVAL (x) & 0xffffffff;
18014 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
18015 }
18016#endif
9878760c 18017
a9098fd0
GK
18018 /* TOC entries are always Pmode-sized, but since this
18019 is a big-endian machine, if we're putting smaller
18020 integer constants in the TOC we have to pad them.
18021 (This is still a win over putting the constants in
18022 a separate constant pool, because then we'd have
02a4ec28
FS
18023 to have both a TOC entry _and_ the actual constant.)
18024
18025 For a 32-bit target, CONST_INT values are loaded and shifted
18026 entirely within `low' and can be stored in one TOC entry. */
18027
37409796
NS
18028 /* It would be easy to make this work, but it doesn't now. */
18029 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
18030
18031 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
18032 {
18033#if HOST_BITS_PER_WIDE_INT == 32
18034 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
18035 POINTER_SIZE, &low, &high, 0);
18036#else
18037 low |= high << 32;
18038 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
18039 high = (HOST_WIDE_INT) low >> 32;
18040 low &= 0xffffffff;
18041#endif
18042 }
a9098fd0 18043
13ded975
DE
18044 if (TARGET_64BIT)
18045 {
18046 if (TARGET_MINIMAL_TOC)
2bfcf297 18047 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 18048 else
2f0552b6
AM
18049 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
18050 (long) high & 0xffffffff, (long) low & 0xffffffff);
18051 fprintf (file, "0x%lx%08lx\n",
18052 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
18053 return;
18054 }
1875cc88 18055 else
13ded975 18056 {
02a4ec28
FS
18057 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
18058 {
18059 if (TARGET_MINIMAL_TOC)
2bfcf297 18060 fputs ("\t.long ", file);
02a4ec28 18061 else
2bfcf297 18062 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
18063 (long) high & 0xffffffff, (long) low & 0xffffffff);
18064 fprintf (file, "0x%lx,0x%lx\n",
18065 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 18066 }
13ded975 18067 else
02a4ec28
FS
18068 {
18069 if (TARGET_MINIMAL_TOC)
2bfcf297 18070 fputs ("\t.long ", file);
02a4ec28 18071 else
2f0552b6
AM
18072 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
18073 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 18074 }
13ded975
DE
18075 return;
18076 }
9878760c
RK
18077 }
18078
18079 if (GET_CODE (x) == CONST)
18080 {
37409796 18081 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 18082
9878760c
RK
18083 base = XEXP (XEXP (x, 0), 0);
18084 offset = INTVAL (XEXP (XEXP (x, 0), 1));
18085 }
f676971a 18086
37409796
NS
18087 switch (GET_CODE (base))
18088 {
18089 case SYMBOL_REF:
18090 name = XSTR (base, 0);
18091 break;
18092
18093 case LABEL_REF:
18094 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
18095 CODE_LABEL_NUMBER (XEXP (base, 0)));
18096 break;
18097
18098 case CODE_LABEL:
18099 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
18100 break;
18101
18102 default:
18103 gcc_unreachable ();
18104 }
9878760c 18105
772c5265 18106 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 18107 if (TARGET_MINIMAL_TOC)
2bfcf297 18108 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
18109 else
18110 {
b6c9286a 18111 fprintf (file, "\t.tc %s", real_name);
9878760c 18112
1875cc88 18113 if (offset < 0)
16fdeb48 18114 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 18115 else if (offset)
16fdeb48 18116 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 18117
19d2d16f 18118 fputs ("[TC],", file);
1875cc88 18119 }
581bc4de
MM
18120
 18121	  /* Currently C++ TOC references to vtables can be emitted before it
18122 is decided whether the vtable is public or private. If this is
18123 the case, then the linker will eventually complain that there is
18124 a TOC reference to an unknown section. Thus, for vtables only,
18125 we emit the TOC reference to reference the symbol and not the
18126 section. */
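  /* As a rough, hypothetical illustration: for a vtable symbol _ZTV3Foo
     referenced at offset 8, the operand emitted below names the symbol
     itself, e.g.
	.tc _ZTV3Foo.P8[TC],_ZTV3Foo+8
     rather than an expression resolved against the vtable's section.  */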
28e510bd 18127 if (VTABLE_NAME_P (name))
581bc4de 18128 {
54ee9799 18129 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 18130 if (offset < 0)
16fdeb48 18131 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 18132 else if (offset > 0)
16fdeb48 18133 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
18134 }
18135 else
18136 output_addr_const (file, x);
19d2d16f 18137 putc ('\n', file);
9878760c
RK
18138}
18139\f
18140/* Output an assembler pseudo-op to write an ASCII string of N characters
18141 starting at P to FILE.
18142
18143 On the RS/6000, we have to do this using the .byte operation and
18144 write out special characters outside the quoted string.
18145 Also, the assembler is broken; very long strings are truncated,
a4f6c312 18146 so we must artificially break them up early. */
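/* For example, output_ascii (file, "Hi\n", 3) emits

	.byte "Hi"
	.byte 10

   quoting the printable characters and writing the newline as a
   decimal byte.  */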
9878760c
RK
18147
18148void
a2369ed3 18149output_ascii (FILE *file, const char *p, int n)
9878760c
RK
18150{
18151 char c;
18152 int i, count_string;
d330fd93
KG
18153 const char *for_string = "\t.byte \"";
18154 const char *for_decimal = "\t.byte ";
18155 const char *to_close = NULL;
9878760c
RK
18156
18157 count_string = 0;
18158 for (i = 0; i < n; i++)
18159 {
18160 c = *p++;
18161 if (c >= ' ' && c < 0177)
18162 {
18163 if (for_string)
18164 fputs (for_string, file);
18165 putc (c, file);
18166
18167 /* Write two quotes to get one. */
18168 if (c == '"')
18169 {
18170 putc (c, file);
18171 ++count_string;
18172 }
18173
18174 for_string = NULL;
18175 for_decimal = "\"\n\t.byte ";
18176 to_close = "\"\n";
18177 ++count_string;
18178
18179 if (count_string >= 512)
18180 {
18181 fputs (to_close, file);
18182
18183 for_string = "\t.byte \"";
18184 for_decimal = "\t.byte ";
18185 to_close = NULL;
18186 count_string = 0;
18187 }
18188 }
18189 else
18190 {
18191 if (for_decimal)
18192 fputs (for_decimal, file);
18193 fprintf (file, "%d", c);
18194
18195 for_string = "\n\t.byte \"";
18196 for_decimal = ", ";
18197 to_close = "\n";
18198 count_string = 0;
18199 }
18200 }
18201
18202 /* Now close the string if we have written one. Then end the line. */
18203 if (to_close)
9ebbca7d 18204 fputs (to_close, file);
9878760c
RK
18205}
18206\f
18207/* Generate a unique section name for FILENAME for a section type
18208 represented by SECTION_DESC. Output goes into BUF.
18209
18210 SECTION_DESC can be any string, as long as it is different for each
18211 possible section type.
18212
18213 We name the section in the same manner as xlc. The name begins with an
18214 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
18215 names) with the last period replaced by the string SECTION_DESC. If
18216 FILENAME does not contain a period, SECTION_DESC is appended to the end of
18217 the name. */
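/* For example, assuming a hypothetical SECTION_DESC of "ro_", the
   filename "dir/foo.c" yields the name "_fooro_": the leading
   directory is stripped, the last period (and everything after it) is
   replaced by SECTION_DESC, and non-alphanumeric characters in the
   base name are dropped.  */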
9878760c
RK
18218
18219void
f676971a 18220rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 18221 const char *section_desc)
9878760c 18222{
9ebbca7d 18223 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
18224 char *p;
18225 int len;
9878760c
RK
18226
18227 after_last_slash = filename;
18228 for (q = filename; *q; q++)
11e5fe42
RK
18229 {
18230 if (*q == '/')
18231 after_last_slash = q + 1;
18232 else if (*q == '.')
18233 last_period = q;
18234 }
9878760c 18235
11e5fe42 18236 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 18237 *buf = (char *) xmalloc (len);
9878760c
RK
18238
18239 p = *buf;
18240 *p++ = '_';
18241
18242 for (q = after_last_slash; *q; q++)
18243 {
11e5fe42 18244 if (q == last_period)
c4ad648e 18245 {
9878760c
RK
18246 strcpy (p, section_desc);
18247 p += strlen (section_desc);
e3981aab 18248 break;
c4ad648e 18249 }
9878760c 18250
e9a780ec 18251 else if (ISALNUM (*q))
c4ad648e 18252 *p++ = *q;
9878760c
RK
18253 }
18254
11e5fe42 18255 if (last_period == 0)
9878760c
RK
18256 strcpy (p, section_desc);
18257 else
18258 *p = '\0';
18259}
e165f3f0 18260\f
a4f6c312 18261/* Emit profile function. */
411707f4 18262
411707f4 18263void
a2369ed3 18264output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 18265{
858081ad
AH
18266 /* Non-standard profiling for kernels, which just saves LR then calls
18267 _mcount without worrying about arg saves. The idea is to change
18268 the function prologue as little as possible as it isn't easy to
18269 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
18270 if (TARGET_PROFILE_KERNEL)
18271 return;
18272
8480e480
CC
18273 if (DEFAULT_ABI == ABI_AIX)
18274 {
9739c90c
JJ
18275#ifndef NO_PROFILE_COUNTERS
18276# define NO_PROFILE_COUNTERS 0
18277#endif
f676971a 18278 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
18279 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
18280 else
18281 {
18282 char buf[30];
18283 const char *label_name;
18284 rtx fun;
411707f4 18285
9739c90c
JJ
18286 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
18287 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
18288 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 18289
9739c90c
JJ
18290 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
18291 fun, Pmode);
18292 }
8480e480 18293 }
ee890fe2
SS
18294 else if (DEFAULT_ABI == ABI_DARWIN)
18295 {
d5fa86ba 18296 const char *mcount_name = RS6000_MCOUNT;
1de43f85 18297 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
18298
18299 /* Be conservative and always set this, at least for now. */
e3b5732b 18300 crtl->uses_pic_offset_table = 1;
ee890fe2
SS
18301
18302#if TARGET_MACHO
18303 /* For PIC code, set up a stub and collect the caller's address
18304 from r0, which is where the prologue puts it. */
11abc112 18305 if (MACHOPIC_INDIRECT
e3b5732b 18306 && crtl->uses_pic_offset_table)
11abc112 18307 caller_addr_regno = 0;
ee890fe2
SS
18308#endif
18309 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
18310 0, VOIDmode, 1,
18311 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
18312 }
411707f4
CC
18313}
18314
a4f6c312 18315/* Write function profiler code. */
e165f3f0
RK
18316
18317void
a2369ed3 18318output_function_profiler (FILE *file, int labelno)
e165f3f0 18319{
3daf36a4 18320 char buf[100];
e165f3f0 18321
38c1f2d7 18322 switch (DEFAULT_ABI)
3daf36a4 18323 {
38c1f2d7 18324 default:
37409796 18325 gcc_unreachable ();
38c1f2d7
MM
18326
18327 case ABI_V4:
09eeeacb
AM
18328 if (!TARGET_32BIT)
18329 {
d4ee4d25 18330 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
18331 return;
18332 }
ffcfcb5f 18333 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 18334 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
18335 if (NO_PROFILE_COUNTERS)
18336 {
18337 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18338 reg_names[0], reg_names[1]);
18339 }
18340 else if (TARGET_SECURE_PLT && flag_pic)
18341 {
18342 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
18343 reg_names[0], reg_names[1]);
18344 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
18345 asm_fprintf (file, "\t{cau|addis} %s,%s,",
18346 reg_names[12], reg_names[12]);
18347 assemble_name (file, buf);
18348 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
18349 assemble_name (file, buf);
18350 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
18351 }
18352 else if (flag_pic == 1)
38c1f2d7 18353 {
dfdfa60f 18354 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
18355 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18356 reg_names[0], reg_names[1]);
17167fd8 18357 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 18358 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 18359 assemble_name (file, buf);
17167fd8 18360 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 18361 }
9ebbca7d 18362 else if (flag_pic > 1)
38c1f2d7 18363 {
71625f3d
AM
18364 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18365 reg_names[0], reg_names[1]);
9ebbca7d 18366 /* Now, we need to get the address of the label. */
71625f3d 18367 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 18368 assemble_name (file, buf);
9ebbca7d
GK
18369 fputs ("-.\n1:", file);
18370 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 18371 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
18372 reg_names[0], reg_names[11]);
18373 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
18374 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 18375 }
38c1f2d7
MM
18376 else
18377 {
17167fd8 18378 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 18379 assemble_name (file, buf);
dfdfa60f 18380 fputs ("@ha\n", file);
71625f3d
AM
18381 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18382 reg_names[0], reg_names[1]);
a260abc9 18383 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 18384 assemble_name (file, buf);
17167fd8 18385 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
18386 }
18387
50d440bc 18388 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
18389 fprintf (file, "\tbl %s%s\n",
18390 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
18391 break;
18392
18393 case ABI_AIX:
ee890fe2 18394 case ABI_DARWIN:
ffcfcb5f
AM
18395 if (!TARGET_PROFILE_KERNEL)
18396 {
a3c9585f 18397 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
18398 }
18399 else
18400 {
37409796 18401 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
18402
18403 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
18404 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
18405
6de9cd9a 18406 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
18407 {
18408 asm_fprintf (file, "\tstd %s,24(%s)\n",
18409 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18410 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18411 asm_fprintf (file, "\tld %s,24(%s)\n",
18412 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18413 }
18414 else
18415 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18416 }
38c1f2d7
MM
18417 break;
18418 }
e165f3f0 18419}
a251ffd0 18420
b54cf83a 18421\f
44cd321e
PS
18422
18423/* The following variable value is the last issued insn. */
18424
18425static rtx last_scheduled_insn;
18426
18427/* The following variable helps to balance issuing of load and
 18428	   store instructions.  */
18429
18430static int load_store_pendulum;
18431
b54cf83a
DE
18432/* Power4 load update and store update instructions are cracked into a
18433 load or store and an integer insn which are executed in the same cycle.
18434 Branches have their own dispatch slot which does not count against the
18435 GCC issue rate, but it changes the program flow so there are no other
18436 instructions to issue in this cycle. */
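/* For instance, on the POWER4/POWER5 dispatch model handled below, a
   cracked insn (e.g. a load with update such as lwzu, if it is
   classified as TYPE_LOAD_U) consumes two issue slots, so the hook
   returns more - 2, while a microcoded insn ends the dispatch group
   and returns 0.  */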
18437
18438static int
f676971a
EC
18439rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
18440 int verbose ATTRIBUTE_UNUSED,
a2369ed3 18441 rtx insn, int more)
b54cf83a 18442{
44cd321e 18443 last_scheduled_insn = insn;
b54cf83a
DE
18444 if (GET_CODE (PATTERN (insn)) == USE
18445 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
18446 {
18447 cached_can_issue_more = more;
18448 return cached_can_issue_more;
18449 }
18450
18451 if (insn_terminates_group_p (insn, current_group))
18452 {
18453 cached_can_issue_more = 0;
18454 return cached_can_issue_more;
18455 }
b54cf83a 18456
d296e02e
AP
 18457	  /* An unrecognized insn has no reservation; it does not affect the issue count.  */
18458 if (recog_memoized (insn) < 0)
18459 return more;
18460
ec507f2d 18461 if (rs6000_sched_groups)
b54cf83a 18462 {
cbe26ab8 18463 if (is_microcoded_insn (insn))
44cd321e 18464 cached_can_issue_more = 0;
cbe26ab8 18465 else if (is_cracked_insn (insn))
44cd321e
PS
18466 cached_can_issue_more = more > 2 ? more - 2 : 0;
18467 else
18468 cached_can_issue_more = more - 1;
18469
18470 return cached_can_issue_more;
b54cf83a 18471 }
165b263e 18472
d296e02e
AP
18473 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
18474 return 0;
18475
44cd321e
PS
18476 cached_can_issue_more = more - 1;
18477 return cached_can_issue_more;
b54cf83a
DE
18478}
18479
a251ffd0
TG
18480/* Adjust the cost of a scheduling dependency. Return the new cost of
18481 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
18482
c237e94a 18483static int
0a4f0294 18484rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 18485{
44cd321e 18486 enum attr_type attr_type;
a251ffd0 18487
44cd321e 18488 if (! recog_memoized (insn))
a251ffd0
TG
18489 return 0;
18490
44cd321e 18491 switch (REG_NOTE_KIND (link))
a251ffd0 18492 {
44cd321e
PS
18493 case REG_DEP_TRUE:
18494 {
18495 /* Data dependency; DEP_INSN writes a register that INSN reads
18496 some cycles later. */
18497
18498 /* Separate a load from a narrower, dependent store. */
18499 if (rs6000_sched_groups
18500 && GET_CODE (PATTERN (insn)) == SET
18501 && GET_CODE (PATTERN (dep_insn)) == SET
18502 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
18503 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
18504 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
18505 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
18506 return cost + 14;
18507
18508 attr_type = get_attr_type (insn);
18509
18510 switch (attr_type)
18511 {
18512 case TYPE_JMPREG:
18513 /* Tell the first scheduling pass about the latency between
18514 a mtctr and bctr (and mtlr and br/blr). The first
18515 scheduling pass will not know about this latency since
18516 the mtctr instruction, which has the latency associated
18517 to it, will be generated by reload. */
18518 return TARGET_POWER ? 5 : 4;
18519 case TYPE_BRANCH:
18520 /* Leave some extra cycles between a compare and its
18521 dependent branch, to inhibit expensive mispredicts. */
18522 if ((rs6000_cpu_attr == CPU_PPC603
18523 || rs6000_cpu_attr == CPU_PPC604
18524 || rs6000_cpu_attr == CPU_PPC604E
18525 || rs6000_cpu_attr == CPU_PPC620
18526 || rs6000_cpu_attr == CPU_PPC630
18527 || rs6000_cpu_attr == CPU_PPC750
18528 || rs6000_cpu_attr == CPU_PPC7400
18529 || rs6000_cpu_attr == CPU_PPC7450
18530 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
18531 || rs6000_cpu_attr == CPU_POWER5
18532 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
18533 && recog_memoized (dep_insn)
18534 && (INSN_CODE (dep_insn) >= 0))
982afe02 18535
44cd321e
PS
18536 switch (get_attr_type (dep_insn))
18537 {
18538 case TYPE_CMP:
18539 case TYPE_COMPARE:
18540 case TYPE_DELAYED_COMPARE:
18541 case TYPE_IMUL_COMPARE:
18542 case TYPE_LMUL_COMPARE:
18543 case TYPE_FPCOMPARE:
18544 case TYPE_CR_LOGICAL:
18545 case TYPE_DELAYED_CR:
18546 return cost + 2;
18547 default:
18548 break;
18549 }
18550 break;
18551
18552 case TYPE_STORE:
18553 case TYPE_STORE_U:
18554 case TYPE_STORE_UX:
18555 case TYPE_FPSTORE:
18556 case TYPE_FPSTORE_U:
18557 case TYPE_FPSTORE_UX:
18558 if ((rs6000_cpu == PROCESSOR_POWER6)
18559 && recog_memoized (dep_insn)
18560 && (INSN_CODE (dep_insn) >= 0))
18561 {
18562
18563 if (GET_CODE (PATTERN (insn)) != SET)
18564 /* If this happens, we have to extend this to schedule
18565 optimally. Return default for now. */
18566 return cost;
18567
18568 /* Adjust the cost for the case where the value written
18569 by a fixed point operation is used as the address
18570 gen value on a store. */
18571 switch (get_attr_type (dep_insn))
18572 {
18573 case TYPE_LOAD:
18574 case TYPE_LOAD_U:
18575 case TYPE_LOAD_UX:
18576 case TYPE_CNTLZ:
18577 {
18578 if (! store_data_bypass_p (dep_insn, insn))
18579 return 4;
18580 break;
18581 }
18582 case TYPE_LOAD_EXT:
18583 case TYPE_LOAD_EXT_U:
18584 case TYPE_LOAD_EXT_UX:
18585 case TYPE_VAR_SHIFT_ROTATE:
18586 case TYPE_VAR_DELAYED_COMPARE:
18587 {
18588 if (! store_data_bypass_p (dep_insn, insn))
18589 return 6;
18590 break;
18591 }
18592 case TYPE_INTEGER:
18593 case TYPE_COMPARE:
18594 case TYPE_FAST_COMPARE:
18595 case TYPE_EXTS:
18596 case TYPE_SHIFT:
18597 case TYPE_INSERT_WORD:
18598 case TYPE_INSERT_DWORD:
18599 case TYPE_FPLOAD_U:
18600 case TYPE_FPLOAD_UX:
18601 case TYPE_STORE_U:
18602 case TYPE_STORE_UX:
18603 case TYPE_FPSTORE_U:
18604 case TYPE_FPSTORE_UX:
18605 {
18606 if (! store_data_bypass_p (dep_insn, insn))
18607 return 3;
18608 break;
18609 }
18610 case TYPE_IMUL:
18611 case TYPE_IMUL2:
18612 case TYPE_IMUL3:
18613 case TYPE_LMUL:
18614 case TYPE_IMUL_COMPARE:
18615 case TYPE_LMUL_COMPARE:
18616 {
18617 if (! store_data_bypass_p (dep_insn, insn))
18618 return 17;
18619 break;
18620 }
18621 case TYPE_IDIV:
18622 {
18623 if (! store_data_bypass_p (dep_insn, insn))
18624 return 45;
18625 break;
18626 }
18627 case TYPE_LDIV:
18628 {
18629 if (! store_data_bypass_p (dep_insn, insn))
18630 return 57;
18631 break;
18632 }
18633 default:
18634 break;
18635 }
18636 }
18637 break;
18638
18639 case TYPE_LOAD:
18640 case TYPE_LOAD_U:
18641 case TYPE_LOAD_UX:
18642 case TYPE_LOAD_EXT:
18643 case TYPE_LOAD_EXT_U:
18644 case TYPE_LOAD_EXT_UX:
18645 if ((rs6000_cpu == PROCESSOR_POWER6)
18646 && recog_memoized (dep_insn)
18647 && (INSN_CODE (dep_insn) >= 0))
18648 {
18649
18650 /* Adjust the cost for the case where the value written
18651 by a fixed point instruction is used within the address
 18652		       gen portion of a subsequent load(u)(x).  */
18653 switch (get_attr_type (dep_insn))
18654 {
18655 case TYPE_LOAD:
18656 case TYPE_LOAD_U:
18657 case TYPE_LOAD_UX:
18658 case TYPE_CNTLZ:
18659 {
18660 if (set_to_load_agen (dep_insn, insn))
18661 return 4;
18662 break;
18663 }
18664 case TYPE_LOAD_EXT:
18665 case TYPE_LOAD_EXT_U:
18666 case TYPE_LOAD_EXT_UX:
18667 case TYPE_VAR_SHIFT_ROTATE:
18668 case TYPE_VAR_DELAYED_COMPARE:
18669 {
18670 if (set_to_load_agen (dep_insn, insn))
18671 return 6;
18672 break;
18673 }
18674 case TYPE_INTEGER:
18675 case TYPE_COMPARE:
18676 case TYPE_FAST_COMPARE:
18677 case TYPE_EXTS:
18678 case TYPE_SHIFT:
18679 case TYPE_INSERT_WORD:
18680 case TYPE_INSERT_DWORD:
18681 case TYPE_FPLOAD_U:
18682 case TYPE_FPLOAD_UX:
18683 case TYPE_STORE_U:
18684 case TYPE_STORE_UX:
18685 case TYPE_FPSTORE_U:
18686 case TYPE_FPSTORE_UX:
18687 {
18688 if (set_to_load_agen (dep_insn, insn))
18689 return 3;
18690 break;
18691 }
18692 case TYPE_IMUL:
18693 case TYPE_IMUL2:
18694 case TYPE_IMUL3:
18695 case TYPE_LMUL:
18696 case TYPE_IMUL_COMPARE:
18697 case TYPE_LMUL_COMPARE:
18698 {
18699 if (set_to_load_agen (dep_insn, insn))
18700 return 17;
18701 break;
18702 }
18703 case TYPE_IDIV:
18704 {
18705 if (set_to_load_agen (dep_insn, insn))
18706 return 45;
18707 break;
18708 }
18709 case TYPE_LDIV:
18710 {
18711 if (set_to_load_agen (dep_insn, insn))
18712 return 57;
18713 break;
18714 }
18715 default:
18716 break;
18717 }
18718 }
18719 break;
18720
18721 case TYPE_FPLOAD:
18722 if ((rs6000_cpu == PROCESSOR_POWER6)
18723 && recog_memoized (dep_insn)
18724 && (INSN_CODE (dep_insn) >= 0)
18725 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
18726 return 2;
18727
18728 default:
18729 break;
18730 }
c9dbf840 18731
a251ffd0 18732 /* Fall out to return default cost. */
44cd321e
PS
18733 }
18734 break;
18735
18736 case REG_DEP_OUTPUT:
18737 /* Output dependency; DEP_INSN writes a register that INSN writes some
18738 cycles later. */
18739 if ((rs6000_cpu == PROCESSOR_POWER6)
18740 && recog_memoized (dep_insn)
18741 && (INSN_CODE (dep_insn) >= 0))
18742 {
18743 attr_type = get_attr_type (insn);
18744
18745 switch (attr_type)
18746 {
18747 case TYPE_FP:
18748 if (get_attr_type (dep_insn) == TYPE_FP)
18749 return 1;
18750 break;
18751 case TYPE_FPLOAD:
18752 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
18753 return 2;
18754 break;
18755 default:
18756 break;
18757 }
18758 }
18759 case REG_DEP_ANTI:
18760 /* Anti dependency; DEP_INSN reads a register that INSN writes some
18761 cycles later. */
18762 return 0;
18763
18764 default:
18765 gcc_unreachable ();
a251ffd0
TG
18766 }
18767
18768 return cost;
18769}
b6c9286a 18770
cbe26ab8 18771	/* The function returns true if INSN is microcoded.
839a4992 18772 Return false otherwise. */
cbe26ab8
DN
18773
18774static bool
18775is_microcoded_insn (rtx insn)
18776{
18777 if (!insn || !INSN_P (insn)
18778 || GET_CODE (PATTERN (insn)) == USE
18779 || GET_CODE (PATTERN (insn)) == CLOBBER)
18780 return false;
18781
d296e02e
AP
18782 if (rs6000_cpu_attr == CPU_CELL)
18783 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
18784
ec507f2d 18785 if (rs6000_sched_groups)
cbe26ab8
DN
18786 {
18787 enum attr_type type = get_attr_type (insn);
18788 if (type == TYPE_LOAD_EXT_U
18789 || type == TYPE_LOAD_EXT_UX
18790 || type == TYPE_LOAD_UX
18791 || type == TYPE_STORE_UX
18792 || type == TYPE_MFCR)
c4ad648e 18793 return true;
cbe26ab8
DN
18794 }
18795
18796 return false;
18797}
18798
cbe26ab8
DN
18799/* The function returns true if INSN is cracked into 2 instructions
18800 by the processor (and therefore occupies 2 issue slots). */
18801
18802static bool
18803is_cracked_insn (rtx insn)
18804{
18805 if (!insn || !INSN_P (insn)
18806 || GET_CODE (PATTERN (insn)) == USE
18807 || GET_CODE (PATTERN (insn)) == CLOBBER)
18808 return false;
18809
ec507f2d 18810 if (rs6000_sched_groups)
cbe26ab8
DN
18811 {
18812 enum attr_type type = get_attr_type (insn);
18813 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
18814 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
18815 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
18816 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
18817 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
18818 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
18819 || type == TYPE_IDIV || type == TYPE_LDIV
18820 || type == TYPE_INSERT_WORD)
18821 return true;
cbe26ab8
DN
18822 }
18823
18824 return false;
18825}
18826
18827/* The function returns true if INSN can be issued only from
a3c9585f 18828 the branch slot. */
cbe26ab8
DN
18829
18830static bool
18831is_branch_slot_insn (rtx insn)
18832{
18833 if (!insn || !INSN_P (insn)
18834 || GET_CODE (PATTERN (insn)) == USE
18835 || GET_CODE (PATTERN (insn)) == CLOBBER)
18836 return false;
18837
ec507f2d 18838 if (rs6000_sched_groups)
cbe26ab8
DN
18839 {
18840 enum attr_type type = get_attr_type (insn);
18841 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 18842 return true;
cbe26ab8
DN
18843 return false;
18844 }
18845
18846 return false;
18847}
79ae11c4 18848
44cd321e
PS
 18849	/* The function returns true if out_insn sets a value that is
 18850	   used in the address generation computation of in_insn.  */
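/* For example (hypothetical insns): if out_insn is
	addi 9,9,8
   and in_insn is
	lwz 0,0(9)
   then r9, the destination of the addi, appears in the load's source,
   so this returns true and the POWER6 address-generation costs above
   apply.  */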
18851static bool
18852set_to_load_agen (rtx out_insn, rtx in_insn)
18853{
18854 rtx out_set, in_set;
18855
18856 /* For performance reasons, only handle the simple case where
18857 both loads are a single_set. */
18858 out_set = single_set (out_insn);
18859 if (out_set)
18860 {
18861 in_set = single_set (in_insn);
18862 if (in_set)
18863 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
18864 }
18865
18866 return false;
18867}
18868
 18869	/* Return true if the target storage location of out_insn
 18870	   is adjacent to the target storage location of in_insn,
 18871	   i.e. if the two memory locations are adjacent.  */
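/* For example (hypothetical insns): stw 10,0(9) followed by stw 11,4(9)
   use the same base register, and the offsets differ by exactly the
   4-byte MEM_SIZE of the first store, so the two locations are
   considered adjacent.  */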
18872
18873static bool
18874adjacent_mem_locations (rtx insn1, rtx insn2)
18875{
18876
e3a0e200
PB
18877 rtx a = get_store_dest (PATTERN (insn1));
18878 rtx b = get_store_dest (PATTERN (insn2));
18879
44cd321e
PS
18880 if ((GET_CODE (XEXP (a, 0)) == REG
18881 || (GET_CODE (XEXP (a, 0)) == PLUS
18882 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
18883 && (GET_CODE (XEXP (b, 0)) == REG
18884 || (GET_CODE (XEXP (b, 0)) == PLUS
18885 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
18886 {
f98e8938 18887 HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
44cd321e 18888 rtx reg0, reg1;
44cd321e
PS
18889
18890 if (GET_CODE (XEXP (a, 0)) == PLUS)
18891 {
18892 reg0 = XEXP (XEXP (a, 0), 0);
18893 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
18894 }
18895 else
18896 reg0 = XEXP (a, 0);
18897
18898 if (GET_CODE (XEXP (b, 0)) == PLUS)
18899 {
18900 reg1 = XEXP (XEXP (b, 0), 0);
18901 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
18902 }
18903 else
18904 reg1 = XEXP (b, 0);
18905
18906 val_diff = val1 - val0;
18907
18908 return ((REGNO (reg0) == REGNO (reg1))
f98e8938
JJ
18909 && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
18910 || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
44cd321e
PS
18911 }
18912
18913 return false;
18914}
18915
a4f6c312 18916/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
18917 priority INSN_PRIORITY (INSN). Increase the priority to execute the
18918 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
18919 define this macro if you do not need to adjust the scheduling
18920 priorities of insns. */
bef84347 18921
c237e94a 18922static int
a2369ed3 18923rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 18924{
a4f6c312
SS
18925 /* On machines (like the 750) which have asymmetric integer units,
18926 where one integer unit can do multiply and divides and the other
18927 can't, reduce the priority of multiply/divide so it is scheduled
18928 before other integer operations. */
bef84347
VM
18929
18930#if 0
2c3c49de 18931 if (! INSN_P (insn))
bef84347
VM
18932 return priority;
18933
18934 if (GET_CODE (PATTERN (insn)) == USE)
18935 return priority;
18936
18937 switch (rs6000_cpu_attr) {
18938 case CPU_PPC750:
18939 switch (get_attr_type (insn))
18940 {
18941 default:
18942 break;
18943
18944 case TYPE_IMUL:
18945 case TYPE_IDIV:
3cb999d8
DE
18946 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
18947 priority, priority);
bef84347
VM
18948 if (priority >= 0 && priority < 0x01000000)
18949 priority >>= 3;
18950 break;
18951 }
18952 }
18953#endif
18954
44cd321e 18955 if (insn_must_be_first_in_group (insn)
79ae11c4 18956 && reload_completed
f676971a 18957 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
18958 && rs6000_sched_restricted_insns_priority)
18959 {
18960
c4ad648e
AM
18961 /* Prioritize insns that can be dispatched only in the first
18962 dispatch slot. */
79ae11c4 18963 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
18964 /* Attach highest priority to insn. This means that in
18965 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 18966 precede 'priority' (critical path) considerations. */
f676971a 18967 return current_sched_info->sched_max_insns_priority;
79ae11c4 18968 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 18969 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
18970 haifa-sched.c:ready_sort(), only 'priority' (critical path)
18971 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
18972 return (priority + 1);
18973 }
79ae11c4 18974
44cd321e
PS
18975 if (rs6000_cpu == PROCESSOR_POWER6
18976 && ((load_store_pendulum == -2 && is_load_insn (insn))
18977 || (load_store_pendulum == 2 && is_store_insn (insn))))
18978 /* Attach highest priority to insn if the scheduler has just issued two
18979 stores and this instruction is a load, or two loads and this instruction
18980 is a store. Power6 wants loads and stores scheduled alternately
18981 when possible */
18982 return current_sched_info->sched_max_insns_priority;
18983
bef84347
VM
18984 return priority;
18985}
18986
d296e02e
AP
18987/* Return true if the instruction is nonpipelined on the Cell. */
18988static bool
18989is_nonpipeline_insn (rtx insn)
18990{
18991 enum attr_type type;
18992 if (!insn || !INSN_P (insn)
18993 || GET_CODE (PATTERN (insn)) == USE
18994 || GET_CODE (PATTERN (insn)) == CLOBBER)
18995 return false;
18996
18997 type = get_attr_type (insn);
18998 if (type == TYPE_IMUL
18999 || type == TYPE_IMUL2
19000 || type == TYPE_IMUL3
19001 || type == TYPE_LMUL
19002 || type == TYPE_IDIV
19003 || type == TYPE_LDIV
19004 || type == TYPE_SDIV
19005 || type == TYPE_DDIV
19006 || type == TYPE_SSQRT
19007 || type == TYPE_DSQRT
19008 || type == TYPE_MFCR
19009 || type == TYPE_MFCRF
19010 || type == TYPE_MFJMPR)
19011 {
19012 return true;
19013 }
19014 return false;
19015}
19016
19017
a4f6c312
SS
19018/* Return how many instructions the machine can issue per cycle. */
19019
c237e94a 19020static int
863d938c 19021rs6000_issue_rate (void)
b6c9286a 19022{
3317bab1
DE
19023 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
19024 if (!reload_completed)
19025 return 1;
19026
b6c9286a 19027 switch (rs6000_cpu_attr) {
3cb999d8
DE
19028 case CPU_RIOS1: /* ? */
19029 case CPU_RS64A:
19030 case CPU_PPC601: /* ? */
ed947a96 19031 case CPU_PPC7450:
3cb999d8 19032 return 3;
b54cf83a 19033 case CPU_PPC440:
b6c9286a 19034 case CPU_PPC603:
bef84347 19035 case CPU_PPC750:
ed947a96 19036 case CPU_PPC7400:
be12c2b0 19037 case CPU_PPC8540:
d296e02e 19038 case CPU_CELL:
fa41c305
EW
19039 case CPU_PPCE300C2:
19040 case CPU_PPCE300C3:
edae5fe3 19041 case CPU_PPCE500MC:
f676971a 19042 return 2;
3cb999d8 19043 case CPU_RIOS2:
b6c9286a 19044 case CPU_PPC604:
19684119 19045 case CPU_PPC604E:
b6c9286a 19046 case CPU_PPC620:
3cb999d8 19047 case CPU_PPC630:
b6c9286a 19048 return 4;
cbe26ab8 19049 case CPU_POWER4:
ec507f2d 19050 case CPU_POWER5:
44cd321e 19051 case CPU_POWER6:
cbe26ab8 19052 return 5;
b6c9286a
MM
19053 default:
19054 return 1;
19055 }
19056}
19057
be12c2b0
VM
19058/* Return how many instructions to look ahead for better insn
19059 scheduling. */
19060
19061static int
863d938c 19062rs6000_use_sched_lookahead (void)
be12c2b0
VM
19063{
19064 if (rs6000_cpu_attr == CPU_PPC8540)
19065 return 4;
d296e02e
AP
19066 if (rs6000_cpu_attr == CPU_CELL)
19067 return (reload_completed ? 8 : 0);
be12c2b0
VM
19068 return 0;
19069}
19070
d296e02e
AP
19071/* We are choosing insn from the ready queue. Return nonzero if INSN can be chosen. */
19072static int
19073rs6000_use_sched_lookahead_guard (rtx insn)
19074{
19075 if (rs6000_cpu_attr != CPU_CELL)
19076 return 1;
19077
19078 if (insn == NULL_RTX || !INSN_P (insn))
19079 abort ();
982afe02 19080
d296e02e
AP
19081 if (!reload_completed
19082 || is_nonpipeline_insn (insn)
19083 || is_microcoded_insn (insn))
19084 return 0;
19085
19086 return 1;
19087}
19088
569fa502
DN
 19089	/* Determine if PAT refers to memory. */
19090
19091static bool
19092is_mem_ref (rtx pat)
19093{
19094 const char * fmt;
19095 int i, j;
19096 bool ret = false;
19097
1de59bbd
DE
19098 /* stack_tie does not produce any real memory traffic. */
19099 if (GET_CODE (pat) == UNSPEC
19100 && XINT (pat, 1) == UNSPEC_TIE)
19101 return false;
19102
569fa502
DN
19103 if (GET_CODE (pat) == MEM)
19104 return true;
19105
19106 /* Recursively process the pattern. */
19107 fmt = GET_RTX_FORMAT (GET_CODE (pat));
19108
19109 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
19110 {
19111 if (fmt[i] == 'e')
19112 ret |= is_mem_ref (XEXP (pat, i));
19113 else if (fmt[i] == 'E')
19114 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
19115 ret |= is_mem_ref (XVECEXP (pat, i, j));
19116 }
19117
19118 return ret;
19119}
19120
19121/* Determine if PAT is a PATTERN of a load insn. */
f676971a 19122
569fa502
DN
19123static bool
19124is_load_insn1 (rtx pat)
19125{
19126 if (!pat || pat == NULL_RTX)
19127 return false;
19128
19129 if (GET_CODE (pat) == SET)
19130 return is_mem_ref (SET_SRC (pat));
19131
19132 if (GET_CODE (pat) == PARALLEL)
19133 {
19134 int i;
19135
19136 for (i = 0; i < XVECLEN (pat, 0); i++)
19137 if (is_load_insn1 (XVECEXP (pat, 0, i)))
19138 return true;
19139 }
19140
19141 return false;
19142}
19143
19144/* Determine if INSN loads from memory. */
19145
19146static bool
19147is_load_insn (rtx insn)
19148{
19149 if (!insn || !INSN_P (insn))
19150 return false;
19151
19152 if (GET_CODE (insn) == CALL_INSN)
19153 return false;
19154
19155 return is_load_insn1 (PATTERN (insn));
19156}
19157
19158/* Determine if PAT is a PATTERN of a store insn. */
19159
19160static bool
19161is_store_insn1 (rtx pat)
19162{
19163 if (!pat || pat == NULL_RTX)
19164 return false;
19165
19166 if (GET_CODE (pat) == SET)
19167 return is_mem_ref (SET_DEST (pat));
19168
19169 if (GET_CODE (pat) == PARALLEL)
19170 {
19171 int i;
19172
19173 for (i = 0; i < XVECLEN (pat, 0); i++)
19174 if (is_store_insn1 (XVECEXP (pat, 0, i)))
19175 return true;
19176 }
19177
19178 return false;
19179}
19180
19181/* Determine if INSN stores to memory. */
19182
19183static bool
19184is_store_insn (rtx insn)
19185{
19186 if (!insn || !INSN_P (insn))
19187 return false;
19188
19189 return is_store_insn1 (PATTERN (insn));
19190}
19191
e3a0e200
PB
19192/* Return the dest of a store insn. */
19193
19194static rtx
19195get_store_dest (rtx pat)
19196{
19197 gcc_assert (is_store_insn1 (pat));
19198
19199 if (GET_CODE (pat) == SET)
19200 return SET_DEST (pat);
19201 else if (GET_CODE (pat) == PARALLEL)
19202 {
19203 int i;
19204
19205 for (i = 0; i < XVECLEN (pat, 0); i++)
19206 {
19207 rtx inner_pat = XVECEXP (pat, 0, i);
19208 if (GET_CODE (inner_pat) == SET
19209 && is_mem_ref (SET_DEST (inner_pat)))
19210 return inner_pat;
19211 }
19212 }
19213 /* We shouldn't get here, because we should have either a simple
19214 store insn or a store with update which are covered above. */
19215 gcc_unreachable();
19216}
19217
569fa502
DN
19218/* Returns whether the dependence between INSN and NEXT is considered
19219 costly by the given target. */
19220
19221static bool
b198261f 19222rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 19223{
b198261f
MK
19224 rtx insn;
19225 rtx next;
19226
aabcd309 19227	  /* If the flag is not enabled, no dependence is considered costly;
f676971a 19228 allow all dependent insns in the same group.
569fa502
DN
19229 This is the most aggressive option. */
19230 if (rs6000_sched_costly_dep == no_dep_costly)
19231 return false;
19232
f676971a 19233	  /* If the flag is set to 1, a dependence is always considered costly;
569fa502
DN
19234 do not allow dependent instructions in the same group.
19235 This is the most conservative option. */
19236 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 19237 return true;
569fa502 19238
b198261f
MK
19239 insn = DEP_PRO (dep);
19240 next = DEP_CON (dep);
19241
f676971a
EC
19242 if (rs6000_sched_costly_dep == store_to_load_dep_costly
19243 && is_load_insn (next)
569fa502
DN
19244 && is_store_insn (insn))
19245 /* Prevent load after store in the same group. */
19246 return true;
19247
19248 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 19249 && is_load_insn (next)
569fa502 19250 && is_store_insn (insn)
e2f6ff94 19251 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
19252 /* Prevent load after store in the same group if it is a true
19253 dependence. */
569fa502 19254 return true;
f676971a
EC
19255
19256 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
19257 and will not be scheduled in the same group. */
19258 if (rs6000_sched_costly_dep <= max_dep_latency
19259 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
19260 return true;
19261
19262 return false;
19263}
19264
f676971a 19265/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
19266 skipping any "non-active" insns - insns that will not actually occupy
19267 an issue slot. Return NULL_RTX if such an insn is not found. */
19268
19269static rtx
19270get_next_active_insn (rtx insn, rtx tail)
19271{
f489aff8 19272 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
19273 return NULL_RTX;
19274
f489aff8 19275 while (1)
cbe26ab8 19276 {
f489aff8
AM
19277 insn = NEXT_INSN (insn);
19278 if (insn == NULL_RTX || insn == tail)
19279 return NULL_RTX;
cbe26ab8 19280
f489aff8
AM
19281 if (CALL_P (insn)
19282 || JUMP_P (insn)
19283 || (NONJUMP_INSN_P (insn)
19284 && GET_CODE (PATTERN (insn)) != USE
19285 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 19286 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
19287 break;
19288 }
19289 return insn;
cbe26ab8
DN
19290}
19291
44cd321e
PS
19292/* We are about to begin issuing insns for this clock cycle. */
19293
19294static int
19295rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
19296 rtx *ready ATTRIBUTE_UNUSED,
19297 int *pn_ready ATTRIBUTE_UNUSED,
19298 int clock_var ATTRIBUTE_UNUSED)
19299{
d296e02e
AP
19300 int n_ready = *pn_ready;
19301
44cd321e
PS
19302 if (sched_verbose)
19303 fprintf (dump, "// rs6000_sched_reorder :\n");
19304
d296e02e
AP
 19305	  /* Reorder the ready list if the second-to-last ready insn
 19306	     is a nonpipelined insn.  */
19307 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
19308 {
19309 if (is_nonpipeline_insn (ready[n_ready - 1])
19310 && (recog_memoized (ready[n_ready - 2]) > 0))
19311 /* Simply swap first two insns. */
19312 {
19313 rtx tmp = ready[n_ready - 1];
19314 ready[n_ready - 1] = ready[n_ready - 2];
19315 ready[n_ready - 2] = tmp;
19316 }
19317 }
19318
44cd321e
PS
19319 if (rs6000_cpu == PROCESSOR_POWER6)
19320 load_store_pendulum = 0;
19321
19322 return rs6000_issue_rate ();
19323}
19324
19325/* Like rs6000_sched_reorder, but called after issuing each insn. */
19326
19327static int
19328rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
19329 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
19330{
19331 if (sched_verbose)
19332 fprintf (dump, "// rs6000_sched_reorder2 :\n");
19333
19334 /* For Power6, we need to handle some special cases to try and keep the
19335 store queue from overflowing and triggering expensive flushes.
19336
19337 This code monitors how load and store instructions are being issued
19338 and skews the ready list one way or the other to increase the likelihood
19339 that a desired instruction is issued at the proper time.
19340
19341 A couple of things are done. First, we maintain a "load_store_pendulum"
19342 to track the current state of load/store issue.
19343
19344 - If the pendulum is at zero, then no loads or stores have been
19345 issued in the current cycle so we do nothing.
19346
19347 - If the pendulum is 1, then a single load has been issued in this
19348 cycle and we attempt to locate another load in the ready list to
19349 issue with it.
19350
2f8e468b 19351 - If the pendulum is -2, then two stores have already been
44cd321e
PS
19352 issued in this cycle, so we increase the priority of the first load
 19353	     in the ready list to increase its likelihood of being chosen first
19354 in the next cycle.
19355
19356 - If the pendulum is -1, then a single store has been issued in this
19357 cycle and we attempt to locate another store in the ready list to
19358 issue with it, preferring a store to an adjacent memory location to
19359 facilitate store pairing in the store queue.
19360
19361 - If the pendulum is 2, then two loads have already been
19362 issued in this cycle, so we increase the priority of the first store
 19363	     in the ready list to increase its likelihood of being chosen first
19364 in the next cycle.
19365
19366 - If the pendulum < -2 or > 2, then do nothing.
19367
19368 Note: This code covers the most common scenarios. There exist non
19369 load/store instructions which make use of the LSU and which
19370 would need to be accounted for to strictly model the behavior
19371 of the machine. Those instructions are currently unaccounted
19372 for to help minimize compile time overhead of this code.
19373 */
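  /* A sketch of one hypothetical cycle: if the last issued insn was a
     store, the pendulum moves to -1 and the code below scans the ready
     list for a second store, preferring one adjacent to the first; if
     instead two loads had already been issued (pendulum == 2), the
     first store found in the ready list merely gets an INSN_PRIORITY
     boost so it is more likely to be picked next cycle.  */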
19374 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
19375 {
19376 int pos;
19377 int i;
19378 rtx tmp;
19379
19380 if (is_store_insn (last_scheduled_insn))
19381 /* Issuing a store, swing the load_store_pendulum to the left */
19382 load_store_pendulum--;
19383 else if (is_load_insn (last_scheduled_insn))
19384 /* Issuing a load, swing the load_store_pendulum to the right */
19385 load_store_pendulum++;
19386 else
19387 return cached_can_issue_more;
19388
19389 /* If the pendulum is balanced, or there is only one instruction on
19390 the ready list, then all is well, so return. */
19391 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
19392 return cached_can_issue_more;
19393
19394 if (load_store_pendulum == 1)
19395 {
19396 /* A load has been issued in this cycle. Scan the ready list
19397 for another load to issue with it */
19398 pos = *pn_ready-1;
19399
19400 while (pos >= 0)
19401 {
19402 if (is_load_insn (ready[pos]))
19403 {
19404 /* Found a load. Move it to the head of the ready list,
 19405	                 and adjust its priority so that it is more likely to
19406 stay there */
19407 tmp = ready[pos];
19408 for (i=pos; i<*pn_ready-1; i++)
19409 ready[i] = ready[i + 1];
19410 ready[*pn_ready-1] = tmp;
 19411	              if (INSN_PRIORITY_KNOWN (tmp))
19412 INSN_PRIORITY (tmp)++;
19413 break;
19414 }
19415 pos--;
19416 }
19417 }
19418 else if (load_store_pendulum == -2)
19419 {
19420 /* Two stores have been issued in this cycle. Increase the
19421 priority of the first load in the ready list to favor it for
19422 issuing in the next cycle. */
19423 pos = *pn_ready-1;
19424
19425 while (pos >= 0)
19426 {
19427 if (is_load_insn (ready[pos])
19428 && INSN_PRIORITY_KNOWN (ready[pos]))
19429 {
19430 INSN_PRIORITY (ready[pos])++;
19431
19432 /* Adjust the pendulum to account for the fact that a load
19433 was found and increased in priority. This is to prevent
19434 increasing the priority of multiple loads */
19435 load_store_pendulum--;
19436
19437 break;
19438 }
19439 pos--;
19440 }
19441 }
19442 else if (load_store_pendulum == -1)
19443 {
19444 /* A store has been issued in this cycle. Scan the ready list for
19445 another store to issue with it, preferring a store to an adjacent
19446 memory location */
19447 int first_store_pos = -1;
19448
19449 pos = *pn_ready-1;
19450
19451 while (pos >= 0)
19452 {
19453 if (is_store_insn (ready[pos]))
19454 {
19455 /* Maintain the index of the first store found on the
19456 list */
19457 if (first_store_pos == -1)
19458 first_store_pos = pos;
19459
19460 if (is_store_insn (last_scheduled_insn)
19461 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
19462 {
19463 /* Found an adjacent store. Move it to the head of the
 19464	                     ready list, and adjust its priority so that it is
19465 more likely to stay there */
19466 tmp = ready[pos];
19467 for (i=pos; i<*pn_ready-1; i++)
19468 ready[i] = ready[i + 1];
19469 ready[*pn_ready-1] = tmp;
 19470	                  if (INSN_PRIORITY_KNOWN (tmp))
19471 INSN_PRIORITY (tmp)++;
19472 first_store_pos = -1;
19473
19474 break;
 19475	                }
19476 }
19477 pos--;
19478 }
19479
19480 if (first_store_pos >= 0)
19481 {
19482 /* An adjacent store wasn't found, but a non-adjacent store was,
19483 so move the non-adjacent store to the front of the ready
19484 list, and adjust its priority so that it is more likely to
19485 stay there. */
19486 tmp = ready[first_store_pos];
19487 for (i=first_store_pos; i<*pn_ready-1; i++)
19488 ready[i] = ready[i + 1];
19489 ready[*pn_ready-1] = tmp;
 19490	          if (INSN_PRIORITY_KNOWN (tmp))
19491 INSN_PRIORITY (tmp)++;
19492 }
19493 }
19494 else if (load_store_pendulum == 2)
19495 {
19496 /* Two loads have been issued in this cycle. Increase the priority
19497 of the first store in the ready list to favor it for issuing in
19498 the next cycle. */
19499 pos = *pn_ready-1;
19500
19501 while (pos >= 0)
19502 {
19503 if (is_store_insn (ready[pos])
19504 && INSN_PRIORITY_KNOWN (ready[pos]))
19505 {
19506 INSN_PRIORITY (ready[pos])++;
19507
19508 /* Adjust the pendulum to account for the fact that a store
19509 was found and increased in priority. This is to prevent
19510 increasing the priority of multiple stores */
19511 load_store_pendulum++;
19512
19513 break;
19514 }
19515 pos--;
19516 }
19517 }
19518 }
19519
19520 return cached_can_issue_more;
19521}
19522
839a4992 19523/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
19524 of group WHICH_GROUP.
19525
19526 If WHICH_GROUP == current_group, this function will return true if INSN
19527 causes the termination of the current group (i.e, the dispatch group to
19528 which INSN belongs). This means that INSN will be the last insn in the
19529 group it belongs to.
19530
19531 If WHICH_GROUP == previous_group, this function will return true if INSN
19532 causes the termination of the previous group (i.e, the dispatch group that
19533 precedes the group to which INSN belongs). This means that INSN will be
19534 the first insn in the group it belongs to). */
19535
19536static bool
19537insn_terminates_group_p (rtx insn, enum group_termination which_group)
19538{
44cd321e 19539 bool first, last;
cbe26ab8
DN
19540
19541 if (! insn)
19542 return false;
569fa502 19543
44cd321e
PS
19544 first = insn_must_be_first_in_group (insn);
19545 last = insn_must_be_last_in_group (insn);
cbe26ab8 19546
44cd321e 19547 if (first && last)
cbe26ab8
DN
19548 return true;
19549
19550 if (which_group == current_group)
44cd321e 19551 return last;
cbe26ab8 19552 else if (which_group == previous_group)
44cd321e
PS
19553 return first;
19554
19555 return false;
19556}
19557
19558
19559static bool
19560insn_must_be_first_in_group (rtx insn)
19561{
19562 enum attr_type type;
19563
19564 if (!insn
19565 || insn == NULL_RTX
19566 || GET_CODE (insn) == NOTE
19567 || GET_CODE (PATTERN (insn)) == USE
19568 || GET_CODE (PATTERN (insn)) == CLOBBER)
19569 return false;
19570
19571 switch (rs6000_cpu)
cbe26ab8 19572 {
44cd321e
PS
19573 case PROCESSOR_POWER5:
19574 if (is_cracked_insn (insn))
19575 return true;
19576 case PROCESSOR_POWER4:
19577 if (is_microcoded_insn (insn))
19578 return true;
19579
19580 if (!rs6000_sched_groups)
19581 return false;
19582
19583 type = get_attr_type (insn);
19584
19585 switch (type)
19586 {
19587 case TYPE_MFCR:
19588 case TYPE_MFCRF:
19589 case TYPE_MTCR:
19590 case TYPE_DELAYED_CR:
19591 case TYPE_CR_LOGICAL:
19592 case TYPE_MTJMPR:
19593 case TYPE_MFJMPR:
19594 case TYPE_IDIV:
19595 case TYPE_LDIV:
19596 case TYPE_LOAD_L:
19597 case TYPE_STORE_C:
19598 case TYPE_ISYNC:
19599 case TYPE_SYNC:
19600 return true;
19601 default:
19602 break;
19603 }
19604 break;
19605 case PROCESSOR_POWER6:
19606 type = get_attr_type (insn);
19607
19608 switch (type)
19609 {
19610 case TYPE_INSERT_DWORD:
19611 case TYPE_EXTS:
19612 case TYPE_CNTLZ:
19613 case TYPE_SHIFT:
19614 case TYPE_VAR_SHIFT_ROTATE:
19615 case TYPE_TRAP:
19616 case TYPE_IMUL:
19617 case TYPE_IMUL2:
19618 case TYPE_IMUL3:
19619 case TYPE_LMUL:
19620 case TYPE_IDIV:
19621 case TYPE_INSERT_WORD:
19622 case TYPE_DELAYED_COMPARE:
19623 case TYPE_IMUL_COMPARE:
19624 case TYPE_LMUL_COMPARE:
19625 case TYPE_FPCOMPARE:
19626 case TYPE_MFCR:
19627 case TYPE_MTCR:
19628 case TYPE_MFJMPR:
19629 case TYPE_MTJMPR:
19630 case TYPE_ISYNC:
19631 case TYPE_SYNC:
19632 case TYPE_LOAD_L:
19633 case TYPE_STORE_C:
19634 case TYPE_LOAD_U:
19635 case TYPE_LOAD_UX:
19636 case TYPE_LOAD_EXT_UX:
19637 case TYPE_STORE_U:
19638 case TYPE_STORE_UX:
19639 case TYPE_FPLOAD_U:
19640 case TYPE_FPLOAD_UX:
19641 case TYPE_FPSTORE_U:
19642 case TYPE_FPSTORE_UX:
19643 return true;
19644 default:
19645 break;
19646 }
19647 break;
19648 default:
19649 break;
19650 }
19651
19652 return false;
19653}
19654
19655static bool
19656insn_must_be_last_in_group (rtx insn)
19657{
19658 enum attr_type type;
19659
19660 if (!insn
19661 || insn == NULL_RTX
19662 || GET_CODE (insn) == NOTE
19663 || GET_CODE (PATTERN (insn)) == USE
19664 || GET_CODE (PATTERN (insn)) == CLOBBER)
19665 return false;
19666
19667 switch (rs6000_cpu) {
19668 case PROCESSOR_POWER4:
19669 case PROCESSOR_POWER5:
19670 if (is_microcoded_insn (insn))
19671 return true;
19672
19673 if (is_branch_slot_insn (insn))
19674 return true;
19675
19676 break;
19677 case PROCESSOR_POWER6:
19678 type = get_attr_type (insn);
19679
19680 switch (type)
19681 {
19682 case TYPE_EXTS:
19683 case TYPE_CNTLZ:
19684 case TYPE_SHIFT:
19685 case TYPE_VAR_SHIFT_ROTATE:
19686 case TYPE_TRAP:
19687 case TYPE_IMUL:
19688 case TYPE_IMUL2:
19689 case TYPE_IMUL3:
19690 case TYPE_LMUL:
19691 case TYPE_IDIV:
19692 case TYPE_DELAYED_COMPARE:
19693 case TYPE_IMUL_COMPARE:
19694 case TYPE_LMUL_COMPARE:
19695 case TYPE_FPCOMPARE:
19696 case TYPE_MFCR:
19697 case TYPE_MTCR:
19698 case TYPE_MFJMPR:
19699 case TYPE_MTJMPR:
19700 case TYPE_ISYNC:
19701 case TYPE_SYNC:
19702 case TYPE_LOAD_L:
19703 case TYPE_STORE_C:
19704 return true;
19705 default:
19706 break;
cbe26ab8 19707 }
44cd321e
PS
19708 break;
19709 default:
19710 break;
19711 }
cbe26ab8
DN
19712
19713 return false;
19714}
19715
839a4992 19716/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
19717 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
19718
19719static bool
19720is_costly_group (rtx *group_insns, rtx next_insn)
19721{
19722 int i;
cbe26ab8
DN
19723 int issue_rate = rs6000_issue_rate ();
19724
19725 for (i = 0; i < issue_rate; i++)
19726 {
e2f6ff94
MK
19727 sd_iterator_def sd_it;
19728 dep_t dep;
cbe26ab8 19729 rtx insn = group_insns[i];
b198261f 19730
cbe26ab8 19731 if (!insn)
c4ad648e 19732 continue;
b198261f 19733
e2f6ff94 19734 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 19735 {
b198261f
MK
19736 rtx next = DEP_CON (dep);
19737
19738 if (next == next_insn
19739 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
19740 return true;
c4ad648e 19741 }
cbe26ab8
DN
19742 }
19743
19744 return false;
19745}
19746
f676971a 19747	/* Utility function used by redefine_groups.
cbe26ab8
DN
19748 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
19749 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
19750 to keep it "far" (in a separate group) from GROUP_INSNS, following
19751 one of the following schemes, depending on the value of the flag
19752 -minsert_sched_nops = X:
19753 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 19754 in order to force NEXT_INSN into a separate group.
f676971a
EC
19755 (2) X < sched_finish_regroup_exact: insert exactly X nops.
19756 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
19757 insertion (has a group just ended, how many vacant issue slots remain in the
19758 last group, and how many dispatch groups were encountered so far). */
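/* A small worked example, with hypothetical numbers: under scheme (1),
   if can_issue_more is 3 and NEXT_INSN is not a branch, two nops are
   emitted so that only the branch slot remains and NEXT_INSN is forced
   into a new group; under scheme (2) with X == 2, exactly two nops are
   emitted regardless of how many slots are vacant.  */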
19759
f676971a 19760static int
c4ad648e
AM
19761force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
19762 rtx next_insn, bool *group_end, int can_issue_more,
19763 int *group_count)
cbe26ab8
DN
19764{
19765 rtx nop;
19766 bool force;
19767 int issue_rate = rs6000_issue_rate ();
19768 bool end = *group_end;
19769 int i;
19770
19771 if (next_insn == NULL_RTX)
19772 return can_issue_more;
19773
19774 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
19775 return can_issue_more;
19776
19777 force = is_costly_group (group_insns, next_insn);
19778 if (!force)
19779 return can_issue_more;
19780
19781 if (sched_verbose > 6)
19782 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 19783 *group_count ,can_issue_more);
cbe26ab8
DN
19784
19785 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
19786 {
19787 if (*group_end)
c4ad648e 19788 can_issue_more = 0;
cbe26ab8
DN
19789
19790 /* Since only a branch can be issued in the last issue_slot, it is
19791 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
19792 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
19793 in this case the last nop will start a new group and the branch
19794 will be forced to the new group. */
cbe26ab8 19795 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 19796 can_issue_more--;
cbe26ab8
DN
19797
19798 while (can_issue_more > 0)
c4ad648e 19799 {
9390387d 19800 nop = gen_nop ();
c4ad648e
AM
19801 emit_insn_before (nop, next_insn);
19802 can_issue_more--;
19803 }
cbe26ab8
DN
19804
19805 *group_end = true;
19806 return 0;
f676971a 19807 }
cbe26ab8
DN
19808
19809 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
19810 {
19811 int n_nops = rs6000_sched_insert_nops;
19812
f676971a 19813 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 19814 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 19815 if (can_issue_more == 0)
c4ad648e 19816 can_issue_more = issue_rate;
cbe26ab8
DN
19817 can_issue_more--;
19818 if (can_issue_more == 0)
c4ad648e
AM
19819 {
19820 can_issue_more = issue_rate - 1;
19821 (*group_count)++;
19822 end = true;
19823 for (i = 0; i < issue_rate; i++)
19824 {
19825 group_insns[i] = 0;
19826 }
19827 }
cbe26ab8
DN
19828
19829 while (n_nops > 0)
c4ad648e
AM
19830 {
19831 nop = gen_nop ();
19832 emit_insn_before (nop, next_insn);
19833 if (can_issue_more == issue_rate - 1) /* new group begins */
19834 end = false;
19835 can_issue_more--;
19836 if (can_issue_more == 0)
19837 {
19838 can_issue_more = issue_rate - 1;
19839 (*group_count)++;
19840 end = true;
19841 for (i = 0; i < issue_rate; i++)
19842 {
19843 group_insns[i] = 0;
19844 }
19845 }
19846 n_nops--;
19847 }
cbe26ab8
DN
19848
19849 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 19850 can_issue_more++;
cbe26ab8 19851
c4ad648e
AM
19852 /* Is next_insn going to start a new group? */
19853 *group_end
19854 = (end
cbe26ab8
DN
19855 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19856 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19857 || (can_issue_more < issue_rate &&
c4ad648e 19858 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19859 if (*group_end && end)
c4ad648e 19860 (*group_count)--;
cbe26ab8
DN
19861
19862 if (sched_verbose > 6)
c4ad648e
AM
19863 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
19864 *group_count, can_issue_more);
f676971a
EC
19865 return can_issue_more;
19866 }
cbe26ab8
DN
19867
19868 return can_issue_more;
19869}
19870
19871/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 19872 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
19873 form in practice. It tries to achieve this synchronization by forcing the
19874 estimated processor grouping on the compiler (as opposed to the function
 19875	   'pad_groups' which tries to force the scheduler's grouping on the processor).
19876
19877 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
19878 examines the (estimated) dispatch groups that will be formed by the processor
19879 dispatcher. It marks these group boundaries to reflect the estimated
19880 processor grouping, overriding the grouping that the scheduler had marked.
19881 Depending on the value of the flag '-minsert-sched-nops' this function can
19882 force certain insns into separate groups or force a certain distance between
19883 them by inserting nops, for example, if there exists a "costly dependence"
19884 between the insns.
19885
19886 The function estimates the group boundaries that the processor will form as
0fa2e4df 19887 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
19888 each insn. A subsequent insn will start a new group if one of the following
19889 4 cases applies:
19890 - no more vacant issue slots remain in the current dispatch group.
19891 - only the last issue slot, which is the branch slot, is vacant, but the next
19892 insn is not a branch.
 19893 - only the last 2 or fewer issue slots, including the branch slot, are vacant,
19894 which means that a cracked insn (which occupies two issue slots) can't be
19895 issued in this group.
f676971a 19896 - fewer than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
19897 start a new group. */
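/* A purely illustrative sketch of the bookkeeping described above; it is
   not part of this file, and an issue_rate of 4 is assumed only for the
   example.  Each insn consumes one slot unless it is cracked:

     insn seen    slots left afterwards    does the next insn start a group?
     add          3                        no
     lwz          2                        no
     cmpw         1 (branch slot only)     yes, unless the next insn is a branch
     bc           0                        yes, the group is full

   A cracked insn needs two slots, so once only the last two slots
   (including the branch slot) remain vacant, a cracked insn as the next
   insn also forces a new group.  */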
19898
19899static int
19900redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19901{
19902 rtx insn, next_insn;
19903 int issue_rate;
19904 int can_issue_more;
19905 int slot, i;
19906 bool group_end;
19907 int group_count = 0;
19908 rtx *group_insns;
19909
19910 /* Initialize. */
19911 issue_rate = rs6000_issue_rate ();
5ead67f6 19912 group_insns = XALLOCAVEC (rtx, issue_rate);
f676971a 19913 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
19914 {
19915 group_insns[i] = 0;
19916 }
19917 can_issue_more = issue_rate;
19918 slot = 0;
19919 insn = get_next_active_insn (prev_head_insn, tail);
19920 group_end = false;
19921
19922 while (insn != NULL_RTX)
19923 {
19924 slot = (issue_rate - can_issue_more);
19925 group_insns[slot] = insn;
19926 can_issue_more =
c4ad648e 19927 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 19928 if (insn_terminates_group_p (insn, current_group))
c4ad648e 19929 can_issue_more = 0;
cbe26ab8
DN
19930
19931 next_insn = get_next_active_insn (insn, tail);
19932 if (next_insn == NULL_RTX)
c4ad648e 19933 return group_count + 1;
cbe26ab8 19934
c4ad648e
AM
19935 /* Is next_insn going to start a new group? */
19936 group_end
19937 = (can_issue_more == 0
19938 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19939 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19940 || (can_issue_more < issue_rate &&
19941 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19942
f676971a 19943 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
19944 next_insn, &group_end, can_issue_more,
19945 &group_count);
cbe26ab8
DN
19946
19947 if (group_end)
c4ad648e
AM
19948 {
19949 group_count++;
19950 can_issue_more = 0;
19951 for (i = 0; i < issue_rate; i++)
19952 {
19953 group_insns[i] = 0;
19954 }
19955 }
cbe26ab8
DN
19956
19957 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 19958 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 19959 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 19960 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
19961
19962 insn = next_insn;
19963 if (can_issue_more == 0)
c4ad648e
AM
19964 can_issue_more = issue_rate;
19965 } /* while */
cbe26ab8
DN
19966
19967 return group_count;
19968}
19969
19970/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
19971 dispatch group boundaries that the scheduler had marked. Pad with nops
19972 any dispatch groups which have vacant issue slots, in order to force the
19973 scheduler's grouping on the processor dispatcher. The function
19974 returns the number of dispatch groups found. */
19975
19976static int
19977pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19978{
19979 rtx insn, next_insn;
19980 rtx nop;
19981 int issue_rate;
19982 int can_issue_more;
19983 int group_end;
19984 int group_count = 0;
19985
19986 /* Initialize issue_rate. */
19987 issue_rate = rs6000_issue_rate ();
19988 can_issue_more = issue_rate;
19989
19990 insn = get_next_active_insn (prev_head_insn, tail);
19991 next_insn = get_next_active_insn (insn, tail);
19992
19993 while (insn != NULL_RTX)
19994 {
19995 can_issue_more =
19996 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
19997
19998 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
19999
20000 if (next_insn == NULL_RTX)
c4ad648e 20001 break;
cbe26ab8
DN
20002
20003 if (group_end)
c4ad648e
AM
20004 {
20005 /* If the scheduler had marked group termination at this location
 20006 (between insn and next_insn), and neither insn nor next_insn will
20007 force group termination, pad the group with nops to force group
20008 termination. */
20009 if (can_issue_more
20010 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
20011 && !insn_terminates_group_p (insn, current_group)
20012 && !insn_terminates_group_p (next_insn, previous_group))
20013 {
9390387d 20014 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
20015 can_issue_more--;
20016
20017 while (can_issue_more)
20018 {
20019 nop = gen_nop ();
20020 emit_insn_before (nop, next_insn);
20021 can_issue_more--;
20022 }
20023 }
20024
20025 can_issue_more = issue_rate;
20026 group_count++;
20027 }
cbe26ab8
DN
20028
20029 insn = next_insn;
20030 next_insn = get_next_active_insn (insn, tail);
20031 }
20032
20033 return group_count;
20034}
20035
44cd321e
PS
20036/* We're beginning a new block. Initialize data structures as necessary. */
20037
20038static void
20039rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
20040 int sched_verbose ATTRIBUTE_UNUSED,
20041 int max_ready ATTRIBUTE_UNUSED)
982afe02 20042{
44cd321e
PS
20043 last_scheduled_insn = NULL_RTX;
20044 load_store_pendulum = 0;
20045}
20046
cbe26ab8
DN
20047/* The following function is called at the end of scheduling BB.
 20048 After reload, it inserts nops to enforce insn group bundling. */
20049
20050static void
38f391a5 20051rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
20052{
20053 int n_groups;
20054
20055 if (sched_verbose)
20056 fprintf (dump, "=== Finishing schedule.\n");
20057
ec507f2d 20058 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
20059 {
20060 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 20061 return;
cbe26ab8
DN
20062
20063 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
20064 n_groups = pad_groups (dump, sched_verbose,
20065 current_sched_info->prev_head,
20066 current_sched_info->next_tail);
cbe26ab8 20067 else
c4ad648e
AM
20068 n_groups = redefine_groups (dump, sched_verbose,
20069 current_sched_info->prev_head,
20070 current_sched_info->next_tail);
cbe26ab8
DN
20071
20072 if (sched_verbose >= 6)
20073 {
20074 fprintf (dump, "ngroups = %d\n", n_groups);
20075 print_rtl (dump, current_sched_info->prev_head);
20076 fprintf (dump, "Done finish_sched\n");
20077 }
20078 }
20079}
b6c9286a 20080\f
b6c9286a
MM
20081/* Length in units of the trampoline for entering a nested function. */
20082
20083int
863d938c 20084rs6000_trampoline_size (void)
b6c9286a
MM
20085{
20086 int ret = 0;
20087
20088 switch (DEFAULT_ABI)
20089 {
20090 default:
37409796 20091 gcc_unreachable ();
b6c9286a
MM
20092
20093 case ABI_AIX:
8f802bfb 20094 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
20095 break;
20096
4dabc42d 20097 case ABI_DARWIN:
b6c9286a 20098 case ABI_V4:
03a7e1a5 20099 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 20100 break;
b6c9286a
MM
20101 }
20102
20103 return ret;
20104}
20105
20106/* Emit RTL insns to initialize the variable parts of a trampoline.
20107 FNADDR is an RTX for the address of the function's pure code.
20108 CXT is an RTX for the static chain value for the function. */
20109
20110void
a2369ed3 20111rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 20112{
8bd04c56 20113 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 20114 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
20115
20116 switch (DEFAULT_ABI)
20117 {
20118 default:
37409796 20119 gcc_unreachable ();
b6c9286a 20120
8bd04c56 20121/* Macros to shorten the code expansions below. */
9613eaff 20122#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 20123#define MEM_PLUS(addr,offset) \
9613eaff 20124 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 20125
b6c9286a
MM
20126 /* Under AIX, just build the 3 word function descriptor */
20127 case ABI_AIX:
8bd04c56 20128 {
9613eaff
SH
20129 rtx fn_reg = gen_reg_rtx (Pmode);
20130 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 20131 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 20132 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
20133 emit_move_insn (MEM_DEREF (addr), fn_reg);
20134 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
20135 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
20136 }
b6c9286a
MM
20137 break;
20138
4dabc42d
TC
20139 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
20140 case ABI_DARWIN:
b6c9286a 20141 case ABI_V4:
9613eaff 20142 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 20143 FALSE, VOIDmode, 4,
9613eaff 20144 addr, Pmode,
eaf1bcf1 20145 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
20146 fnaddr, Pmode,
20147 ctx_reg, Pmode);
b6c9286a 20148 break;
b6c9286a
MM
20149 }
20150
20151 return;
20152}
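/* Illustrative only, not part of this file: the kind of GNU C source that
   makes the compiler materialize a trampoline on the stack.  Taking the
   address of the nested function below is what eventually leads to
   rs6000_initialize_trampoline filling in the code and static-chain words
   at run time.

     int
     apply (int (*fn) (int), int x)
     {
       return fn (x);
     }

     int
     outer (int bias)
     {
       int add_bias (int v) { return v + bias; }   // nested function
       return apply (add_bias, 41);                // taking its address needs a trampoline
     }
*/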
7509c759
MM
20153
20154\f
91d231cb 20155/* Table of valid machine attributes. */
a4f6c312 20156
91d231cb 20157const struct attribute_spec rs6000_attribute_table[] =
7509c759 20158{
91d231cb 20159 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 20160 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
20161 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
20162 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
20163 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
20164 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
20165#ifdef SUBTARGET_ATTRIBUTE_TABLE
20166 SUBTARGET_ATTRIBUTE_TABLE,
20167#endif
a5c76ee6 20168 { NULL, 0, 0, false, false, false, NULL }
91d231cb 20169};
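/* Illustrative only, not part of this file: how the attributes registered
   above typically appear in user code.  The declarations are made-up
   examples, not taken from any real project.

     void far_helper (void) __attribute__ ((longcall));    // called via a register, so distance does not matter
     void near_helper (void) __attribute__ ((shortcall));  // plain bl, even under -mlongcall

     struct __attribute__ ((ms_struct)) ms_layout { char c; int i; };
     struct __attribute__ ((gcc_struct)) gcc_layout { char c; int i; };
*/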
7509c759 20170
8bb418a3
ZL
20171/* Handle the "altivec" attribute. The attribute may have
20172 arguments as follows:
f676971a 20173
8bb418a3
ZL
20174 __attribute__((altivec(vector__)))
20175 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
20176 __attribute__((altivec(bool__))) (always followed by 'unsigned')
20177
20178 and may appear more than once (e.g., 'vector bool char') in a
20179 given declaration. */
20180
20181static tree
f90ac3f0
UP
20182rs6000_handle_altivec_attribute (tree *node,
20183 tree name ATTRIBUTE_UNUSED,
20184 tree args,
8bb418a3
ZL
20185 int flags ATTRIBUTE_UNUSED,
20186 bool *no_add_attrs)
20187{
20188 tree type = *node, result = NULL_TREE;
20189 enum machine_mode mode;
20190 int unsigned_p;
20191 char altivec_type
20192 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
20193 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
20194 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 20195 : '?');
8bb418a3
ZL
20196
20197 while (POINTER_TYPE_P (type)
20198 || TREE_CODE (type) == FUNCTION_TYPE
20199 || TREE_CODE (type) == METHOD_TYPE
20200 || TREE_CODE (type) == ARRAY_TYPE)
20201 type = TREE_TYPE (type);
20202
20203 mode = TYPE_MODE (type);
20204
f90ac3f0
UP
20205 /* Check for invalid AltiVec type qualifiers. */
20206 if (type == long_unsigned_type_node || type == long_integer_type_node)
20207 {
20208 if (TARGET_64BIT)
20209 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
20210 else if (rs6000_warn_altivec_long)
d4ee4d25 20211 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
20212 }
20213 else if (type == long_long_unsigned_type_node
20214 || type == long_long_integer_type_node)
20215 error ("use of %<long long%> in AltiVec types is invalid");
20216 else if (type == double_type_node)
20217 error ("use of %<double%> in AltiVec types is invalid");
20218 else if (type == long_double_type_node)
20219 error ("use of %<long double%> in AltiVec types is invalid");
20220 else if (type == boolean_type_node)
20221 error ("use of boolean types in AltiVec types is invalid");
20222 else if (TREE_CODE (type) == COMPLEX_TYPE)
20223 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
20224 else if (DECIMAL_FLOAT_MODE_P (mode))
20225 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
20226
20227 switch (altivec_type)
20228 {
20229 case 'v':
8df83eae 20230 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
20231 switch (mode)
20232 {
c4ad648e
AM
20233 case SImode:
20234 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
20235 break;
20236 case HImode:
20237 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
20238 break;
20239 case QImode:
20240 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
20241 break;
20242 case SFmode: result = V4SF_type_node; break;
20243 /* If the user says 'vector int bool', we may be handed the 'bool'
20244 attribute _before_ the 'vector' attribute, and so select the
20245 proper type in the 'b' case below. */
20246 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
20247 result = type;
20248 default: break;
8bb418a3
ZL
20249 }
20250 break;
20251 case 'b':
20252 switch (mode)
20253 {
c4ad648e
AM
20254 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
20255 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
20256 case QImode: case V16QImode: result = bool_V16QI_type_node;
20257 default: break;
8bb418a3
ZL
20258 }
20259 break;
20260 case 'p':
20261 switch (mode)
20262 {
c4ad648e
AM
20263 case V8HImode: result = pixel_V8HI_type_node;
20264 default: break;
8bb418a3
ZL
20265 }
20266 default: break;
20267 }
20268
7958a2a6
FJ
20269 if (result && result != type && TYPE_READONLY (type))
20270 result = build_qualified_type (result, TYPE_QUAL_CONST);
20271
8bb418a3
ZL
20272 *no_add_attrs = true; /* No need to hang on to the attribute. */
20273
f90ac3f0 20274 if (result)
5dc11954 20275 *node = lang_hooks.types.reconstruct_complex_type (*node, result);
8bb418a3
ZL
20276
20277 return NULL_TREE;
20278}
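/* Illustrative only, not part of this file: declarations that reach the
   handler above.  With the AltiVec keywords enabled, 'vector', 'bool' and
   'pixel' expand to the internal attribute forms listed in the comment
   before the handler, for example

     vector unsigned int vui;   // altivec(vector__) on unsigned int
     vector bool short vbs;     // altivec(bool__) on unsigned short
     vector pixel vp;           // altivec(pixel__) on unsigned short

   and the handler rewrites the underlying scalar type into the matching
   V4SI / bool V8HI / pixel V8HI vector type.  */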
20279
f18eca82
ZL
20280/* AltiVec defines four built-in scalar types that serve as vector
20281 elements; we must teach the compiler how to mangle them. */
20282
20283static const char *
3101faab 20284rs6000_mangle_type (const_tree type)
f18eca82 20285{
608063c3
JB
20286 type = TYPE_MAIN_VARIANT (type);
20287
20288 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
20289 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
20290 return NULL;
20291
f18eca82
ZL
20292 if (type == bool_char_type_node) return "U6__boolc";
20293 if (type == bool_short_type_node) return "U6__bools";
20294 if (type == pixel_type_node) return "u7__pixel";
20295 if (type == bool_int_type_node) return "U6__booli";
20296
337bde91
DE
20297 /* Mangle IBM extended float long double as `g' (__float128) on
20298 powerpc*-linux where long-double-64 previously was the default. */
20299 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
20300 && TARGET_ELF
20301 && TARGET_LONG_DOUBLE_128
20302 && !TARGET_IEEEQUAD)
20303 return "g";
20304
f18eca82
ZL
20305 /* For all other types, use normal C++ mangling. */
20306 return NULL;
20307}
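/* Illustrative only, not part of this file: the fragments returned above
   keep C++ overloads on the AltiVec-specific element types distinct, e.g.

     void f (vector bool int);     // element type contributes "U6__booli"
     void f (vector signed int);   // element type mangles as a plain 'int'

   so the two declarations produce different mangled names.  */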
20308
a5c76ee6
ZW
20309/* Handle a "longcall" or "shortcall" attribute; arguments as in
20310 struct attribute_spec.handler. */
a4f6c312 20311
91d231cb 20312static tree
f676971a
EC
20313rs6000_handle_longcall_attribute (tree *node, tree name,
20314 tree args ATTRIBUTE_UNUSED,
20315 int flags ATTRIBUTE_UNUSED,
a2369ed3 20316 bool *no_add_attrs)
91d231cb
JM
20317{
20318 if (TREE_CODE (*node) != FUNCTION_TYPE
20319 && TREE_CODE (*node) != FIELD_DECL
20320 && TREE_CODE (*node) != TYPE_DECL)
20321 {
5c498b10 20322 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
20323 IDENTIFIER_POINTER (name));
20324 *no_add_attrs = true;
20325 }
6a4cee5f 20326
91d231cb 20327 return NULL_TREE;
7509c759
MM
20328}
20329
a5c76ee6
ZW
20330/* Set longcall attributes on all functions declared when
20331 rs6000_default_long_calls is true. */
20332static void
a2369ed3 20333rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
20334{
20335 if (rs6000_default_long_calls
20336 && (TREE_CODE (type) == FUNCTION_TYPE
20337 || TREE_CODE (type) == METHOD_TYPE))
20338 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
20339 NULL_TREE,
20340 TYPE_ATTRIBUTES (type));
16d6f994
EC
20341
20342#if TARGET_MACHO
20343 darwin_set_default_type_attributes (type);
20344#endif
a5c76ee6
ZW
20345}
20346
3cb999d8
DE
20347/* Return a reference suitable for calling a function with the
20348 longcall attribute. */
a4f6c312 20349
9390387d 20350rtx
a2369ed3 20351rs6000_longcall_ref (rtx call_ref)
6a4cee5f 20352{
d330fd93 20353 const char *call_name;
6a4cee5f
MM
20354 tree node;
20355
20356 if (GET_CODE (call_ref) != SYMBOL_REF)
20357 return call_ref;
20358
20359 /* System V adds '.' to the internal name, so skip them. */
20360 call_name = XSTR (call_ref, 0);
20361 if (*call_name == '.')
20362 {
20363 while (*call_name == '.')
20364 call_name++;
20365
20366 node = get_identifier (call_name);
39403d82 20367 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
20368 }
20369
20370 return force_reg (Pmode, call_ref);
20371}
7509c759 20372\f
77ccdfed
EC
20373#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
20374#define TARGET_USE_MS_BITFIELD_LAYOUT 0
20375#endif
20376
20377/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
20378 struct attribute_spec.handler. */
20379static tree
20380rs6000_handle_struct_attribute (tree *node, tree name,
20381 tree args ATTRIBUTE_UNUSED,
20382 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
20383{
20384 tree *type = NULL;
20385 if (DECL_P (*node))
20386 {
20387 if (TREE_CODE (*node) == TYPE_DECL)
20388 type = &TREE_TYPE (*node);
20389 }
20390 else
20391 type = node;
20392
20393 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
20394 || TREE_CODE (*type) == UNION_TYPE)))
20395 {
20396 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
20397 *no_add_attrs = true;
20398 }
20399
20400 else if ((is_attribute_p ("ms_struct", name)
20401 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
20402 || ((is_attribute_p ("gcc_struct", name)
20403 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
20404 {
20405 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
20406 IDENTIFIER_POINTER (name));
20407 *no_add_attrs = true;
20408 }
20409
20410 return NULL_TREE;
20411}
20412
20413static bool
3101faab 20414rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
20415{
20416 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
20417 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
20418 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
20419}
20420\f
b64a1b53
RH
20421#ifdef USING_ELFOS_H
20422
d6b5193b 20423/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 20424
d6b5193b
RS
20425static void
20426rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20427{
20428 if (DEFAULT_ABI == ABI_AIX
20429 && TARGET_MINIMAL_TOC
20430 && !TARGET_RELOCATABLE)
20431 {
20432 if (!toc_initialized)
20433 {
20434 toc_initialized = 1;
20435 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20436 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
20437 fprintf (asm_out_file, "\t.tc ");
20438 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
20439 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20440 fprintf (asm_out_file, "\n");
20441
20442 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20443 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20444 fprintf (asm_out_file, " = .+32768\n");
20445 }
20446 else
20447 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20448 }
20449 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
20450 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20451 else
20452 {
20453 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20454 if (!toc_initialized)
20455 {
20456 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20457 fprintf (asm_out_file, " = .+32768\n");
20458 toc_initialized = 1;
20459 }
20460 }
20461}
20462
20463/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 20464
b64a1b53 20465static void
d6b5193b
RS
20466rs6000_elf_asm_init_sections (void)
20467{
20468 toc_section
20469 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
20470
20471 sdata2_section
20472 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
20473 SDATA2_SECTION_ASM_OP);
20474}
20475
20476/* Implement TARGET_SELECT_RTX_SECTION. */
20477
20478static section *
f676971a 20479rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 20480 unsigned HOST_WIDE_INT align)
7509c759 20481{
a9098fd0 20482 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20483 return toc_section;
7509c759 20484 else
d6b5193b 20485 return default_elf_select_rtx_section (mode, x, align);
7509c759 20486}
d9407988 20487\f
d1908feb
JJ
20488/* For a SYMBOL_REF, set generic flags and then perform some
20489 target-specific processing.
20490
d1908feb
JJ
20491 When the AIX ABI is requested on a non-AIX system, replace the
20492 function name with the real name (with a leading .) rather than the
20493 function descriptor name. This saves a lot of overriding code to
20494 read the prefixes. */
d9407988 20495
fb49053f 20496static void
a2369ed3 20497rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 20498{
d1908feb 20499 default_encode_section_info (decl, rtl, first);
b2003250 20500
d1908feb
JJ
20501 if (first
20502 && TREE_CODE (decl) == FUNCTION_DECL
20503 && !TARGET_AIX
20504 && DEFAULT_ABI == ABI_AIX)
d9407988 20505 {
c6a2438a 20506 rtx sym_ref = XEXP (rtl, 0);
d1908feb 20507 size_t len = strlen (XSTR (sym_ref, 0));
5ead67f6 20508 char *str = XALLOCAVEC (char, len + 2);
d1908feb
JJ
20509 str[0] = '.';
20510 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
20511 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 20512 }
d9407988
MM
20513}
20514
21d9bb3f
PB
20515static inline bool
20516compare_section_name (const char *section, const char *template)
20517{
20518 int len;
20519
20520 len = strlen (template);
20521 return (strncmp (section, template, len) == 0
20522 && (section[len] == 0 || section[len] == '.'));
20523}
20524
c1b7d95a 20525bool
3101faab 20526rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
20527{
20528 if (rs6000_sdata == SDATA_NONE)
20529 return false;
20530
7482ad25
AF
20531 /* We want to merge strings, so we never consider them small data. */
20532 if (TREE_CODE (decl) == STRING_CST)
20533 return false;
20534
20535 /* Functions are never in the small data area. */
20536 if (TREE_CODE (decl) == FUNCTION_DECL)
20537 return false;
20538
0e5dbd9b
DE
20539 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
20540 {
20541 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
20542 if (compare_section_name (section, ".sdata")
20543 || compare_section_name (section, ".sdata2")
20544 || compare_section_name (section, ".gnu.linkonce.s")
20545 || compare_section_name (section, ".sbss")
20546 || compare_section_name (section, ".sbss2")
20547 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
20548 || strcmp (section, ".PPC.EMB.sdata0") == 0
20549 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
20550 return true;
20551 }
20552 else
20553 {
20554 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
20555
20556 if (size > 0
307b599c 20557 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
20558 /* If it's not public, and we're not going to reference it there,
20559 there's no need to put it in the small data section. */
0e5dbd9b
DE
20560 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
20561 return true;
20562 }
20563
20564 return false;
20565}
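/* Illustrative only, not part of this file; the threshold below is just an
   assumption for the example.  With small-data support enabled and a -G
   threshold of, say, 8 bytes, the first two objects are candidates for the
   small data sections tested above, while the explicit section attribute
   sends the third there regardless of its size.

     int counter;                                     // 4 bytes, .sbss candidate
     const short table[3] = { 1, 2, 3 };              // 6 bytes, .sdata2 candidate
     char big[256] __attribute__ ((section (".sdata")));
*/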
20566
b91da81f 20567#endif /* USING_ELFOS_H */
aacd3885
RS
20568\f
20569/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 20570
aacd3885 20571static bool
3101faab 20572rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
20573{
20574 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
20575}
a6c2a102 20576\f
000034eb 20577/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
20578 ADDR can be effectively incremented by incrementing REG.
20579
20580 r0 is special and we must not select it as an address
20581 register by this routine since our caller will try to
20582 increment the returned register via an "la" instruction. */
000034eb 20583
9390387d 20584rtx
a2369ed3 20585find_addr_reg (rtx addr)
000034eb
DE
20586{
20587 while (GET_CODE (addr) == PLUS)
20588 {
02441cd6
JL
20589 if (GET_CODE (XEXP (addr, 0)) == REG
20590 && REGNO (XEXP (addr, 0)) != 0)
000034eb 20591 addr = XEXP (addr, 0);
02441cd6
JL
20592 else if (GET_CODE (XEXP (addr, 1)) == REG
20593 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
20594 addr = XEXP (addr, 1);
20595 else if (CONSTANT_P (XEXP (addr, 0)))
20596 addr = XEXP (addr, 1);
20597 else if (CONSTANT_P (XEXP (addr, 1)))
20598 addr = XEXP (addr, 0);
20599 else
37409796 20600 gcc_unreachable ();
000034eb 20601 }
37409796
NS
20602 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
20603 return addr;
000034eb
DE
20604}
20605
a6c2a102 20606void
a2369ed3 20607rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
20608{
20609 fatal_insn ("bad address", op);
20610}
c8023011 20611
ee890fe2
SS
20612#if TARGET_MACHO
20613
efdba735 20614static tree branch_island_list = 0;
ee890fe2 20615
efdba735
SH
20616/* Remember to generate a branch island for far calls to the given
20617 function. */
ee890fe2 20618
f676971a 20619static void
c4ad648e
AM
20620add_compiler_branch_island (tree label_name, tree function_name,
20621 int line_number)
ee890fe2 20622{
efdba735 20623 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 20624 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
20625 TREE_CHAIN (branch_island) = branch_island_list;
20626 branch_island_list = branch_island;
ee890fe2
SS
20627}
20628
efdba735
SH
20629#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
20630#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
20631#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
20632 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 20633
efdba735
SH
20634/* Generate far-jump branch islands for everything on the
20635 branch_island_list. Invoked immediately after the last instruction
20636 of the epilogue has been emitted; the branch-islands must be
20637 appended to, and contiguous with, the function body. Mach-O stubs
20638 are generated in machopic_output_stub(). */
ee890fe2 20639
efdba735
SH
20640static void
20641macho_branch_islands (void)
20642{
20643 char tmp_buf[512];
20644 tree branch_island;
20645
20646 for (branch_island = branch_island_list;
20647 branch_island;
20648 branch_island = TREE_CHAIN (branch_island))
20649 {
20650 const char *label =
20651 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
20652 const char *name =
11abc112 20653 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
20654 char name_buf[512];
20655 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
20656 if (name[0] == '*' || name[0] == '&')
20657 strcpy (name_buf, name+1);
20658 else
20659 {
20660 name_buf[0] = '_';
20661 strcpy (name_buf+1, name);
20662 }
20663 strcpy (tmp_buf, "\n");
20664 strcat (tmp_buf, label);
ee890fe2 20665#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 20666 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 20667 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 20668#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
20669 if (flag_pic)
20670 {
20671 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
20672 strcat (tmp_buf, label);
20673 strcat (tmp_buf, "_pic\n");
20674 strcat (tmp_buf, label);
20675 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 20676
efdba735
SH
20677 strcat (tmp_buf, "\taddis r11,r11,ha16(");
20678 strcat (tmp_buf, name_buf);
20679 strcat (tmp_buf, " - ");
20680 strcat (tmp_buf, label);
20681 strcat (tmp_buf, "_pic)\n");
f676971a 20682
efdba735 20683 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 20684
efdba735
SH
20685 strcat (tmp_buf, "\taddi r12,r11,lo16(");
20686 strcat (tmp_buf, name_buf);
20687 strcat (tmp_buf, " - ");
20688 strcat (tmp_buf, label);
20689 strcat (tmp_buf, "_pic)\n");
f676971a 20690
efdba735
SH
20691 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
20692 }
20693 else
20694 {
20695 strcat (tmp_buf, ":\nlis r12,hi16(");
20696 strcat (tmp_buf, name_buf);
20697 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
20698 strcat (tmp_buf, name_buf);
20699 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
20700 }
20701 output_asm_insn (tmp_buf, 0);
ee890fe2 20702#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 20703 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 20704 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 20705#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 20706 }
ee890fe2 20707
efdba735 20708 branch_island_list = 0;
ee890fe2
SS
20709}
20710
 20711/* NO_PREVIOUS_DEF checks in the linked list whether the function name is
20712 already there or not. */
20713
efdba735 20714static int
a2369ed3 20715no_previous_def (tree function_name)
ee890fe2 20716{
efdba735
SH
20717 tree branch_island;
20718 for (branch_island = branch_island_list;
20719 branch_island;
20720 branch_island = TREE_CHAIN (branch_island))
20721 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
20722 return 0;
20723 return 1;
20724}
20725
20726/* GET_PREV_LABEL gets the label name from the previous definition of
20727 the function. */
20728
efdba735 20729static tree
a2369ed3 20730get_prev_label (tree function_name)
ee890fe2 20731{
efdba735
SH
20732 tree branch_island;
20733 for (branch_island = branch_island_list;
20734 branch_island;
20735 branch_island = TREE_CHAIN (branch_island))
20736 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
20737 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
20738 return 0;
20739}
20740
75b1b789
MS
20741#ifndef DARWIN_LINKER_GENERATES_ISLANDS
20742#define DARWIN_LINKER_GENERATES_ISLANDS 0
20743#endif
20744
20745/* KEXTs still need branch islands. */
20746#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
20747 || flag_mkernel || flag_apple_kext)
20748
ee890fe2 20749/* INSN is either a function call or a millicode call. It may have an
f676971a 20750 unconditional jump in its delay slot.
ee890fe2
SS
20751
20752 CALL_DEST is the routine we are calling. */
20753
20754char *
c4ad648e
AM
20755output_call (rtx insn, rtx *operands, int dest_operand_number,
20756 int cookie_operand_number)
ee890fe2
SS
20757{
20758 static char buf[256];
75b1b789
MS
20759 if (DARWIN_GENERATE_ISLANDS
20760 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 20761 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
20762 {
20763 tree labelname;
efdba735 20764 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 20765
ee890fe2
SS
20766 if (no_previous_def (funname))
20767 {
ee890fe2
SS
20768 rtx label_rtx = gen_label_rtx ();
20769 char *label_buf, temp_buf[256];
20770 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
20771 CODE_LABEL_NUMBER (label_rtx));
20772 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
20773 labelname = get_identifier (label_buf);
a38e7aa5 20774 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
20775 }
20776 else
20777 labelname = get_prev_label (funname);
20778
efdba735
SH
20779 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
20780 instruction will reach 'foo', otherwise link as 'bl L42'".
20781 "L42" should be a 'branch island', that will do a far jump to
20782 'foo'. Branch islands are generated in
20783 macho_branch_islands(). */
ee890fe2 20784 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 20785 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
20786 }
20787 else
efdba735
SH
20788 sprintf (buf, "bl %%z%d", dest_operand_number);
20789 return buf;
ee890fe2
SS
20790}
20791
ee890fe2
SS
20792/* Generate PIC and indirect symbol stubs. */
20793
20794void
a2369ed3 20795machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
20796{
20797 unsigned int length;
a4f6c312
SS
20798 char *symbol_name, *lazy_ptr_name;
20799 char *local_label_0;
ee890fe2
SS
20800 static int label = 0;
20801
df56a27f 20802 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 20803 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 20804
ee890fe2 20805
ee890fe2 20806 length = strlen (symb);
5ead67f6 20807 symbol_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
20808 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
20809
5ead67f6 20810 lazy_ptr_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
20811 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
20812
ee890fe2 20813 if (flag_pic == 2)
56c779bc 20814 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 20815 else
56c779bc 20816 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
20817
20818 if (flag_pic == 2)
20819 {
d974312d
DJ
20820 fprintf (file, "\t.align 5\n");
20821
20822 fprintf (file, "%s:\n", stub);
20823 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20824
876455fa 20825 label++;
5ead67f6 20826 local_label_0 = XALLOCAVEC (char, sizeof ("\"L00000000000$spb\""));
876455fa 20827 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 20828
ee890fe2
SS
20829 fprintf (file, "\tmflr r0\n");
20830 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
20831 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
20832 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
20833 lazy_ptr_name, local_label_0);
20834 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
20835 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
20836 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
20837 lazy_ptr_name, local_label_0);
20838 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
20839 fprintf (file, "\tbctr\n");
20840 }
20841 else
d974312d
DJ
20842 {
20843 fprintf (file, "\t.align 4\n");
20844
20845 fprintf (file, "%s:\n", stub);
20846 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20847
20848 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
20849 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
20850 (TARGET_64BIT ? "ldu" : "lwzu"),
20851 lazy_ptr_name);
d974312d
DJ
20852 fprintf (file, "\tmtctr r12\n");
20853 fprintf (file, "\tbctr\n");
20854 }
f676971a 20855
56c779bc 20856 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
20857 fprintf (file, "%s:\n", lazy_ptr_name);
20858 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
20859 fprintf (file, "%sdyld_stub_binding_helper\n",
20860 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
20861}
20862
20863/* Legitimize PIC addresses. If the address is already
20864 position-independent, we return ORIG. Newly generated
 20865 position-independent addresses go into a reg. This is REG if
 20866 nonzero, otherwise we allocate register(s) as necessary. */
20867
4fbbe694 20868#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
20869
20870rtx
f676971a 20871rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 20872 rtx reg)
ee890fe2
SS
20873{
20874 rtx base, offset;
20875
20876 if (reg == NULL && ! reload_in_progress && ! reload_completed)
20877 reg = gen_reg_rtx (Pmode);
20878
20879 if (GET_CODE (orig) == CONST)
20880 {
37409796
NS
20881 rtx reg_temp;
20882
ee890fe2
SS
20883 if (GET_CODE (XEXP (orig, 0)) == PLUS
20884 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
20885 return orig;
20886
37409796 20887 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 20888
37409796
NS
20889 /* Use a different reg for the intermediate value, as
20890 it will be marked UNCHANGING. */
b3a13419 20891 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
20892 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
20893 Pmode, reg_temp);
20894 offset =
20895 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
20896 Pmode, reg);
bb8df8a6 20897
ee890fe2
SS
20898 if (GET_CODE (offset) == CONST_INT)
20899 {
20900 if (SMALL_INT (offset))
ed8908e7 20901 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
20902 else if (! reload_in_progress && ! reload_completed)
20903 offset = force_reg (Pmode, offset);
20904 else
c859cda6
DJ
20905 {
20906 rtx mem = force_const_mem (Pmode, orig);
20907 return machopic_legitimize_pic_address (mem, Pmode, reg);
20908 }
ee890fe2 20909 }
f1c25d3b 20910 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
20911 }
20912
20913 /* Fall back on generic machopic code. */
20914 return machopic_legitimize_pic_address (orig, mode, reg);
20915}
20916
c4e18b1c
GK
20917/* Output a .machine directive for the Darwin assembler, and call
20918 the generic start_file routine. */
20919
20920static void
20921rs6000_darwin_file_start (void)
20922{
94ff898d 20923 static const struct
c4e18b1c
GK
20924 {
20925 const char *arg;
20926 const char *name;
20927 int if_set;
20928 } mapping[] = {
55dbfb48 20929 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
20930 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
20931 { "power4", "ppc970", 0 },
20932 { "G5", "ppc970", 0 },
20933 { "7450", "ppc7450", 0 },
20934 { "7400", "ppc7400", MASK_ALTIVEC },
20935 { "G4", "ppc7400", 0 },
20936 { "750", "ppc750", 0 },
20937 { "740", "ppc750", 0 },
20938 { "G3", "ppc750", 0 },
20939 { "604e", "ppc604e", 0 },
20940 { "604", "ppc604", 0 },
20941 { "603e", "ppc603", 0 },
20942 { "603", "ppc603", 0 },
20943 { "601", "ppc601", 0 },
20944 { NULL, "ppc", 0 } };
20945 const char *cpu_id = "";
20946 size_t i;
94ff898d 20947
9390387d 20948 rs6000_file_start ();
192d0f89 20949 darwin_file_start ();
c4e18b1c
GK
20950
20951 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
20952 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
20953 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
20954 && rs6000_select[i].string[0] != '\0')
20955 cpu_id = rs6000_select[i].string;
20956
20957 /* Look through the mapping array. Pick the first name that either
20958 matches the argument, has a bit set in IF_SET that is also set
20959 in the target flags, or has a NULL name. */
20960
20961 i = 0;
20962 while (mapping[i].arg != NULL
20963 && strcmp (mapping[i].arg, cpu_id) != 0
20964 && (mapping[i].if_set & target_flags) == 0)
20965 i++;
20966
20967 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
20968}
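/* Illustrative only, not part of this file: with -mcpu=G5 the mapping
   table above makes this hook emit

     .machine ppc970

   at the top of the assembly output; when neither the -mcpu string nor any
   of the IF_SET flag bits match, the final NULL entry falls back to
   ".machine ppc".  */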
20969
ee890fe2 20970#endif /* TARGET_MACHO */
7c262518
RH
20971
20972#if TARGET_ELF
9b580a0b
RH
20973static int
20974rs6000_elf_reloc_rw_mask (void)
7c262518 20975{
9b580a0b
RH
20976 if (flag_pic)
20977 return 3;
20978 else if (DEFAULT_ABI == ABI_AIX)
20979 return 2;
20980 else
20981 return 0;
7c262518 20982}
d9f6800d
RH
20983
20984/* Record an element in the table of global constructors. SYMBOL is
20985 a SYMBOL_REF of the function to be called; PRIORITY is a number
20986 between 0 and MAX_INIT_PRIORITY.
20987
20988 This differs from default_named_section_asm_out_constructor in
20989 that we have special handling for -mrelocatable. */
20990
20991static void
a2369ed3 20992rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
20993{
20994 const char *section = ".ctors";
20995 char buf[16];
20996
20997 if (priority != DEFAULT_INIT_PRIORITY)
20998 {
20999 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
21000 /* Invert the numbering so the linker puts us in the proper
21001 order; constructors are run from right to left, and the
21002 linker sorts in increasing order. */
21003 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21004 section = buf;
21005 }
21006
d6b5193b 21007 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21008 assemble_align (POINTER_SIZE);
d9f6800d
RH
21009
21010 if (TARGET_RELOCATABLE)
21011 {
21012 fputs ("\t.long (", asm_out_file);
21013 output_addr_const (asm_out_file, symbol);
21014 fputs (")@fixup\n", asm_out_file);
21015 }
21016 else
c8af3574 21017 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
21018}
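/* Illustrative only, not part of this file: for an object given an init
   priority of 200 (for instance via the C++ init_priority attribute) and a
   MAX_INIT_PRIORITY of 65535, the sprintf above selects the section
   ".ctors.65335", so the linker's increasing sort of the numbered .ctors
   sections preserves the intended construction order.  */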
21019
21020static void
a2369ed3 21021rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
21022{
21023 const char *section = ".dtors";
21024 char buf[16];
21025
21026 if (priority != DEFAULT_INIT_PRIORITY)
21027 {
21028 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
21029 /* Invert the numbering so the linker puts us in the proper
21030 order; constructors are run from right to left, and the
21031 linker sorts in increasing order. */
21032 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21033 section = buf;
21034 }
21035
d6b5193b 21036 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21037 assemble_align (POINTER_SIZE);
d9f6800d
RH
21038
21039 if (TARGET_RELOCATABLE)
21040 {
21041 fputs ("\t.long (", asm_out_file);
21042 output_addr_const (asm_out_file, symbol);
21043 fputs (")@fixup\n", asm_out_file);
21044 }
21045 else
c8af3574 21046 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 21047}
9739c90c
JJ
21048
21049void
a2369ed3 21050rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
21051{
21052 if (TARGET_64BIT)
21053 {
21054 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
21055 ASM_OUTPUT_LABEL (file, name);
21056 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
21057 rs6000_output_function_entry (file, name);
21058 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
21059 if (DOT_SYMBOLS)
9739c90c 21060 {
85b776df 21061 fputs ("\t.size\t", file);
9739c90c 21062 assemble_name (file, name);
85b776df
AM
21063 fputs (",24\n\t.type\t.", file);
21064 assemble_name (file, name);
21065 fputs (",@function\n", file);
21066 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
21067 {
21068 fputs ("\t.globl\t.", file);
21069 assemble_name (file, name);
21070 putc ('\n', file);
21071 }
9739c90c 21072 }
85b776df
AM
21073 else
21074 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 21075 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
21076 rs6000_output_function_entry (file, name);
21077 fputs (":\n", file);
9739c90c
JJ
21078 return;
21079 }
21080
21081 if (TARGET_RELOCATABLE
7f970b70 21082 && !TARGET_SECURE_PLT
e3b5732b 21083 && (get_pool_size () != 0 || crtl->profile)
3c9eb5f4 21084 && uses_TOC ())
9739c90c
JJ
21085 {
21086 char buf[256];
21087
21088 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
21089
21090 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
21091 fprintf (file, "\t.long ");
21092 assemble_name (file, buf);
21093 putc ('-', file);
21094 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
21095 assemble_name (file, buf);
21096 putc ('\n', file);
21097 }
21098
21099 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
21100 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
21101
21102 if (DEFAULT_ABI == ABI_AIX)
21103 {
21104 const char *desc_name, *orig_name;
21105
21106 orig_name = (*targetm.strip_name_encoding) (name);
21107 desc_name = orig_name;
21108 while (*desc_name == '.')
21109 desc_name++;
21110
21111 if (TREE_PUBLIC (decl))
21112 fprintf (file, "\t.globl %s\n", desc_name);
21113
21114 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
21115 fprintf (file, "%s:\n", desc_name);
21116 fprintf (file, "\t.long %s\n", orig_name);
21117 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
21118 if (DEFAULT_ABI == ABI_AIX)
21119 fputs ("\t.long 0\n", file);
21120 fprintf (file, "\t.previous\n");
21121 }
21122 ASM_OUTPUT_LABEL (file, name);
21123}
1334b570
AM
21124
21125static void
21126rs6000_elf_end_indicate_exec_stack (void)
21127{
21128 if (TARGET_32BIT)
21129 file_end_indicate_exec_stack ();
21130}
7c262518
RH
21131#endif
21132
cbaaba19 21133#if TARGET_XCOFF
0d5817b2
DE
21134static void
21135rs6000_xcoff_asm_output_anchor (rtx symbol)
21136{
21137 char buffer[100];
21138
21139 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
21140 SYMBOL_REF_BLOCK_OFFSET (symbol));
21141 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
21142}
21143
7c262518 21144static void
a2369ed3 21145rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
21146{
21147 fputs (GLOBAL_ASM_OP, stream);
21148 RS6000_OUTPUT_BASENAME (stream, name);
21149 putc ('\n', stream);
21150}
21151
d6b5193b
RS
21152/* A get_unnamed_decl callback, used for read-only sections. PTR
21153 points to the section string variable. */
21154
21155static void
21156rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
21157{
890f9edf
OH
21158 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
21159 *(const char *const *) directive,
21160 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21161}
21162
21163/* Likewise for read-write sections. */
21164
21165static void
21166rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
21167{
890f9edf
OH
21168 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
21169 *(const char *const *) directive,
21170 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21171}
21172
21173/* A get_unnamed_section callback, used for switching to toc_section. */
21174
21175static void
21176rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
21177{
21178 if (TARGET_MINIMAL_TOC)
21179 {
21180 /* toc_section is always selected at least once from
21181 rs6000_xcoff_file_start, so this is guaranteed to
21182 always be defined once and only once in each file. */
21183 if (!toc_initialized)
21184 {
21185 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
21186 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
21187 toc_initialized = 1;
21188 }
21189 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
21190 (TARGET_32BIT ? "" : ",3"));
21191 }
21192 else
21193 fputs ("\t.toc\n", asm_out_file);
21194}
21195
21196/* Implement TARGET_ASM_INIT_SECTIONS. */
21197
21198static void
21199rs6000_xcoff_asm_init_sections (void)
21200{
21201 read_only_data_section
21202 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21203 &xcoff_read_only_section_name);
21204
21205 private_data_section
21206 = get_unnamed_section (SECTION_WRITE,
21207 rs6000_xcoff_output_readwrite_section_asm_op,
21208 &xcoff_private_data_section_name);
21209
21210 read_only_private_data_section
21211 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21212 &xcoff_private_data_section_name);
21213
21214 toc_section
21215 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
21216
21217 readonly_data_section = read_only_data_section;
21218 exception_section = data_section;
21219}
21220
9b580a0b
RH
21221static int
21222rs6000_xcoff_reloc_rw_mask (void)
21223{
21224 return 3;
21225}
21226
b275d088 21227static void
c18a5b6c
MM
21228rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
21229 tree decl ATTRIBUTE_UNUSED)
7c262518 21230{
0e5dbd9b
DE
21231 int smclass;
21232 static const char * const suffix[3] = { "PR", "RO", "RW" };
21233
21234 if (flags & SECTION_CODE)
21235 smclass = 0;
21236 else if (flags & SECTION_WRITE)
21237 smclass = 2;
21238 else
21239 smclass = 1;
21240
5b5198f7 21241 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 21242 (flags & SECTION_CODE) ? "." : "",
5b5198f7 21243 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 21244}
ae46c4e0 21245
d6b5193b 21246static section *
f676971a 21247rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 21248 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 21249{
9b580a0b 21250 if (decl_readonly_section (decl, reloc))
ae46c4e0 21251 {
0e5dbd9b 21252 if (TREE_PUBLIC (decl))
d6b5193b 21253 return read_only_data_section;
ae46c4e0 21254 else
d6b5193b 21255 return read_only_private_data_section;
ae46c4e0
RH
21256 }
21257 else
21258 {
0e5dbd9b 21259 if (TREE_PUBLIC (decl))
d6b5193b 21260 return data_section;
ae46c4e0 21261 else
d6b5193b 21262 return private_data_section;
ae46c4e0
RH
21263 }
21264}
21265
21266static void
a2369ed3 21267rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
21268{
21269 const char *name;
ae46c4e0 21270
5b5198f7
DE
21271 /* Use select_section for private and uninitialized data. */
21272 if (!TREE_PUBLIC (decl)
21273 || DECL_COMMON (decl)
0e5dbd9b
DE
21274 || DECL_INITIAL (decl) == NULL_TREE
21275 || DECL_INITIAL (decl) == error_mark_node
21276 || (flag_zero_initialized_in_bss
21277 && initializer_zerop (DECL_INITIAL (decl))))
21278 return;
21279
21280 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21281 name = (*targetm.strip_name_encoding) (name);
21282 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 21283}
b64a1b53 21284
fb49053f
RH
21285/* Select section for constant in constant pool.
21286
21287 On RS/6000, all constants are in the private read-only data area.
21288 However, if this is being placed in the TOC it must be output as a
21289 toc entry. */
21290
d6b5193b 21291static section *
f676971a 21292rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 21293 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
21294{
21295 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 21296 return toc_section;
b64a1b53 21297 else
d6b5193b 21298 return read_only_private_data_section;
b64a1b53 21299}
772c5265
RH
21300
21301/* Remove any trailing [DS] or the like from the symbol name. */
21302
21303static const char *
a2369ed3 21304rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
21305{
21306 size_t len;
21307 if (*name == '*')
21308 name++;
21309 len = strlen (name);
21310 if (name[len - 1] == ']')
21311 return ggc_alloc_string (name, len - 4);
21312 else
21313 return name;
21314}
21315
5add3202
DE
21316/* Section attributes. AIX is always PIC. */
21317
21318static unsigned int
a2369ed3 21319rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 21320{
5b5198f7 21321 unsigned int align;
9b580a0b 21322 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
21323
21324 /* Align to at least UNIT size. */
21325 if (flags & SECTION_CODE)
21326 align = MIN_UNITS_PER_WORD;
21327 else
21328 /* Increase alignment of large objects if not already stricter. */
21329 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
21330 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
21331 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
21332
21333 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 21334}
a5fe455b 21335
1bc7c5b6
ZW
21336/* Output at beginning of assembler file.
21337
21338 Initialize the section names for the RS/6000 at this point.
21339
21340 Specify filename, including full path, to assembler.
21341
21342 We want to go into the TOC section so at least one .toc will be emitted.
21343 Also, in order to output proper .bs/.es pairs, we need at least one static
21344 [RW] section emitted.
21345
21346 Finally, declare mcount when profiling to make the assembler happy. */
21347
21348static void
863d938c 21349rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
21350{
21351 rs6000_gen_section_name (&xcoff_bss_section_name,
21352 main_input_filename, ".bss_");
21353 rs6000_gen_section_name (&xcoff_private_data_section_name,
21354 main_input_filename, ".rw_");
21355 rs6000_gen_section_name (&xcoff_read_only_section_name,
21356 main_input_filename, ".ro_");
21357
21358 fputs ("\t.file\t", asm_out_file);
21359 output_quoted_string (asm_out_file, main_input_filename);
21360 fputc ('\n', asm_out_file);
1bc7c5b6 21361 if (write_symbols != NO_DEBUG)
d6b5193b
RS
21362 switch_to_section (private_data_section);
21363 switch_to_section (text_section);
1bc7c5b6
ZW
21364 if (profile_flag)
21365 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
21366 rs6000_file_start ();
21367}
21368
a5fe455b
ZW
21369/* Output at end of assembler file.
21370 On the RS/6000, referencing data should automatically pull in text. */
21371
21372static void
863d938c 21373rs6000_xcoff_file_end (void)
a5fe455b 21374{
d6b5193b 21375 switch_to_section (text_section);
a5fe455b 21376 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 21377 switch_to_section (data_section);
a5fe455b
ZW
21378 fputs (TARGET_32BIT
21379 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
21380 asm_out_file);
21381}
f1384257 21382#endif /* TARGET_XCOFF */
0e5dbd9b 21383
3c50106f
RH
21384/* Compute a (partial) cost for rtx X. Return true if the complete
21385 cost has been computed, and false if subexpressions should be
21386 scanned. In either case, *TOTAL contains the cost result. */
21387
21388static bool
1494c534 21389rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 21390{
f0517163
RS
21391 enum machine_mode mode = GET_MODE (x);
21392
3c50106f
RH
21393 switch (code)
21394 {
30a555d9 21395 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 21396 case CONST_INT:
066cd967
DE
21397 if (((outer_code == SET
21398 || outer_code == PLUS
21399 || outer_code == MINUS)
279bb624
DE
21400 && (satisfies_constraint_I (x)
21401 || satisfies_constraint_L (x)))
066cd967 21402 || (outer_code == AND
279bb624
DE
21403 && (satisfies_constraint_K (x)
21404 || (mode == SImode
21405 ? satisfies_constraint_L (x)
21406 : satisfies_constraint_J (x))
1990cd79
AM
21407 || mask_operand (x, mode)
21408 || (mode == DImode
21409 && mask64_operand (x, DImode))))
22e54023 21410 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
21411 && (satisfies_constraint_K (x)
21412 || (mode == SImode
21413 ? satisfies_constraint_L (x)
21414 : satisfies_constraint_J (x))))
066cd967
DE
21415 || outer_code == ASHIFT
21416 || outer_code == ASHIFTRT
21417 || outer_code == LSHIFTRT
21418 || outer_code == ROTATE
21419 || outer_code == ROTATERT
d5861a7a 21420 || outer_code == ZERO_EXTRACT
066cd967 21421 || (outer_code == MULT
279bb624 21422 && satisfies_constraint_I (x))
22e54023
DE
21423 || ((outer_code == DIV || outer_code == UDIV
21424 || outer_code == MOD || outer_code == UMOD)
21425 && exact_log2 (INTVAL (x)) >= 0)
066cd967 21426 || (outer_code == COMPARE
279bb624
DE
21427 && (satisfies_constraint_I (x)
21428 || satisfies_constraint_K (x)))
22e54023 21429 || (outer_code == EQ
279bb624
DE
21430 && (satisfies_constraint_I (x)
21431 || satisfies_constraint_K (x)
21432 || (mode == SImode
21433 ? satisfies_constraint_L (x)
21434 : satisfies_constraint_J (x))))
22e54023 21435 || (outer_code == GTU
279bb624 21436 && satisfies_constraint_I (x))
22e54023 21437 || (outer_code == LTU
279bb624 21438 && satisfies_constraint_P (x)))
066cd967
DE
21439 {
21440 *total = 0;
21441 return true;
21442 }
21443 else if ((outer_code == PLUS
4ae234b0 21444 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 21445 || (outer_code == MINUS
4ae234b0 21446 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
21447 || ((outer_code == SET
21448 || outer_code == IOR
21449 || outer_code == XOR)
21450 && (INTVAL (x)
21451 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
21452 {
21453 *total = COSTS_N_INSNS (1);
21454 return true;
21455 }
21456 /* FALLTHRU */
21457
21458 case CONST_DOUBLE:
f6fe3a22 21459 if (mode == DImode && code == CONST_DOUBLE)
066cd967 21460 {
f6fe3a22
DE
21461 if ((outer_code == IOR || outer_code == XOR)
21462 && CONST_DOUBLE_HIGH (x) == 0
21463 && (CONST_DOUBLE_LOW (x)
21464 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
21465 {
21466 *total = 0;
21467 return true;
21468 }
21469 else if ((outer_code == AND && and64_2_operand (x, DImode))
21470 || ((outer_code == SET
21471 || outer_code == IOR
21472 || outer_code == XOR)
21473 && CONST_DOUBLE_HIGH (x) == 0))
21474 {
21475 *total = COSTS_N_INSNS (1);
21476 return true;
21477 }
066cd967
DE
21478 }
21479 /* FALLTHRU */
21480
3c50106f 21481 case CONST:
066cd967 21482 case HIGH:
3c50106f 21483 case SYMBOL_REF:
066cd967
DE
21484 case MEM:
21485 /* When optimizing for size, MEM should be slightly more expensive
 21486 than generating an address, e.g., (plus (reg) (const)).
c112cf2b 21487 L1 cache latency is about two instructions. */
      *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
      return true;

    case LABEL_REF:
      *total = 0;
      return true;

    case PLUS:
      if (mode == DFmode)
        {
          if (GET_CODE (XEXP (x, 0)) == MULT)
            {
              /* FNMA accounted in outer NEG.  */
              if (outer_code == NEG)
                *total = rs6000_cost->dmul - rs6000_cost->fp;
              else
                *total = rs6000_cost->dmul;
            }
          else
            *total = rs6000_cost->fp;
        }
      else if (mode == SFmode)
        {
          /* FNMA accounted in outer NEG.  */
          if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
            *total = 0;
          else
            *total = rs6000_cost->fp;
        }
      else
        *total = COSTS_N_INSNS (1);
      return false;

    case MINUS:
      if (mode == DFmode)
        {
          if (GET_CODE (XEXP (x, 0)) == MULT
              || GET_CODE (XEXP (x, 1)) == MULT)
            {
              /* FNMA accounted in outer NEG.  */
              if (outer_code == NEG)
                *total = rs6000_cost->dmul - rs6000_cost->fp;
              else
                *total = rs6000_cost->dmul;
            }
          else
            *total = rs6000_cost->fp;
        }
      else if (mode == SFmode)
        {
          /* FNMA accounted in outer NEG.  */
          if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
            *total = 0;
          else
            *total = rs6000_cost->fp;
        }
      else
        *total = COSTS_N_INSNS (1);
      return false;

    case MULT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && satisfies_constraint_I (XEXP (x, 1)))
        {
          if (INTVAL (XEXP (x, 1)) >= -256
              && INTVAL (XEXP (x, 1)) <= 255)
            *total = rs6000_cost->mulsi_const9;
          else
            *total = rs6000_cost->mulsi_const;
        }
      /* FMA accounted in outer PLUS/MINUS.  */
      else if ((mode == DFmode || mode == SFmode)
               && (outer_code == PLUS || outer_code == MINUS))
        *total = 0;
      else if (mode == DFmode)
        *total = rs6000_cost->dmul;
      else if (mode == SFmode)
        *total = rs6000_cost->fp;
      else if (mode == DImode)
        *total = rs6000_cost->muldi;
      else
        *total = rs6000_cost->mulsi;
      return false;

    case DIV:
    case MOD:
      if (FLOAT_MODE_P (mode))
        {
          *total = mode == DFmode ? rs6000_cost->ddiv
                                  : rs6000_cost->sdiv;
          return false;
        }
      /* FALLTHRU */

    case UDIV:
    case UMOD:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
        {
          if (code == DIV || code == MOD)
            /* Shift, addze */
            *total = COSTS_N_INSNS (2);
          else
            /* Shift */
            *total = COSTS_N_INSNS (1);
        }
      else
        {
          if (GET_MODE (XEXP (x, 1)) == DImode)
            *total = rs6000_cost->divdi;
          else
            *total = rs6000_cost->divsi;
        }
      /* Add in shift and subtract for MOD.  */
      if (code == MOD || code == UMOD)
        *total += COSTS_N_INSNS (2);
      return false;

    case CTZ:
    case FFS:
      *total = COSTS_N_INSNS (4);
      return false;

    case POPCOUNT:
      *total = COSTS_N_INSNS (6);
      return false;

    case NOT:
      if (outer_code == AND || outer_code == IOR || outer_code == XOR)
        {
          *total = 0;
          return false;
        }
      /* FALLTHRU */

    case AND:
    case CLZ:
    case IOR:
    case XOR:
    case ZERO_EXTRACT:
      *total = COSTS_N_INSNS (1);
      return false;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* Handle mul_highpart.  */
      if (outer_code == TRUNCATE
          && GET_CODE (XEXP (x, 0)) == MULT)
        {
          if (mode == DImode)
            *total = rs6000_cost->muldi;
          else
            *total = rs6000_cost->mulsi;
          return true;
        }
      else if (outer_code == AND)
        *total = 0;
      else
        *total = COSTS_N_INSNS (1);
      return false;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (GET_CODE (XEXP (x, 0)) == MEM)
        *total = 0;
      else
        *total = COSTS_N_INSNS (1);
      return false;

    case COMPARE:
    case NEG:
    case ABS:
      if (!FLOAT_MODE_P (mode))
        {
          *total = COSTS_N_INSNS (1);
          return false;
        }
      /* FALLTHRU */

    case FLOAT:
    case UNSIGNED_FLOAT:
    case FIX:
    case UNSIGNED_FIX:
    case FLOAT_TRUNCATE:
      *total = rs6000_cost->fp;
      return false;

    case FLOAT_EXTEND:
      if (mode == DFmode)
        *total = 0;
      else
        *total = rs6000_cost->fp;
      return false;

    case UNSPEC:
      switch (XINT (x, 1))
        {
        case UNSPEC_FRSP:
          *total = rs6000_cost->fp;
          return true;

        default:
          break;
        }
      break;

    case CALL:
    case IF_THEN_ELSE:
      if (optimize_size)
        {
          *total = COSTS_N_INSNS (1);
          return true;
        }
      else if (FLOAT_MODE_P (mode)
               && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
        {
          *total = rs6000_cost->fp;
          return false;
        }
      break;

    case EQ:
    case GTU:
    case LTU:
      /* Carry bit requires mode == Pmode.
         NEG or PLUS already counted so only add one.  */
      if (mode == Pmode
          && (outer_code == NEG || outer_code == PLUS))
        {
          *total = COSTS_N_INSNS (1);
          return true;
        }
      if (outer_code == SET)
        {
          if (XEXP (x, 1) == const0_rtx)
            {
              *total = COSTS_N_INSNS (2);
              return true;
            }
          else if (mode == Pmode)
            {
              *total = COSTS_N_INSNS (3);
              return false;
            }
        }
      /* FALLTHRU */

    case GT:
    case LT:
    case UNORDERED:
      if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
        {
          *total = COSTS_N_INSNS (2);
          return true;
        }
      /* CC COMPARE.  */
      if (outer_code == COMPARE)
        {
          *total = 0;
          return true;
        }
      break;

    default:
      break;
    }

  return false;
}
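
/* Worked example (editorial note, not part of the original file):
   for (set (reg:SI 3) (const_int 12)) the CONST_INT arm sees
   outer_code == SET and satisfies_constraint_I, so *TOTAL becomes 0;
   the constant is free because it fits directly in an "li"-class insn.
   For (set (reg:SI 3) (const_int 0x12345678)) neither the I nor the L
   constraint matches, so the cost is COSTS_N_INSNS (1), reflecting the
   extra instruction needed to build the constant (roughly a lis/ori
   pair in total).  */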

/* A C expression returning the cost of moving data from a register of class
   CLASS1 to one of CLASS2.  */

int
rs6000_register_move_cost (enum machine_mode mode,
                           enum reg_class from, enum reg_class to)
{
  /* Moves from/to GENERAL_REGS.  */
  if (reg_classes_intersect_p (to, GENERAL_REGS)
      || reg_classes_intersect_p (from, GENERAL_REGS))
    {
      if (! reg_classes_intersect_p (to, GENERAL_REGS))
        from = to;

      if (from == FLOAT_REGS || from == ALTIVEC_REGS)
        return (rs6000_memory_move_cost (mode, from, 0)
                + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));

      /* It's more expensive to move CR_REGS than CR0_REGS because of the
         shift.  */
      else if (from == CR_REGS)
        return 4;

      /* Power6 has slower LR/CTR moves so make them more expensive than
         memory in order to bias spills to memory.  */
      else if (rs6000_cpu == PROCESSOR_POWER6
               && reg_classes_intersect_p (from, LINK_OR_CTR_REGS))
        return 6 * hard_regno_nregs[0][mode];

      else
        /* A move will cost one instruction per GPR moved.  */
        return 2 * hard_regno_nregs[0][mode];
    }

  /* Moving between two similar registers is just one instruction.  */
  else if (reg_classes_intersect_p (to, from))
    return (mode == TFmode || mode == TDmode) ? 4 : 2;

  /* Everything else has to go through GENERAL_REGS.  */
  else
    return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
            + rs6000_register_move_cost (mode, from, GENERAL_REGS));
}
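
/* Worked example (editorial note, not part of the original file):
   with SImode, a GPR-to-GPR copy costs 2 (one insn for one GPR) and a
   CR-to-GPR move costs 4.  Moving between an FPR and a GPR has no
   direct path here, so it is priced as a store plus a load via
   rs6000_memory_move_cost.  On POWER6 a move involving LR or CTR is 6
   per register, deliberately above the memory cost so the register
   allocator prefers spilling to memory.  */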

/* A C expression returning the cost of moving data of MODE from a register
   to or from memory.  */

int
rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
                         int in ATTRIBUTE_UNUSED)
{
  if (reg_classes_intersect_p (class, GENERAL_REGS))
    return 4 * hard_regno_nregs[0][mode];
  else if (reg_classes_intersect_p (class, FLOAT_REGS))
    return 4 * hard_regno_nregs[32][mode];
  else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
    return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
  else
    return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
}

/* Return the decl of a target-specific builtin that implements the
   reciprocal of the function FN, or NULL_TREE if none is available.  */

static tree
rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
                           bool sqrt ATTRIBUTE_UNUSED)
{
  if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
         && flag_finite_math_only && !flag_trapping_math
         && flag_unsafe_math_optimizations))
    return NULL_TREE;

  if (md_fn)
    return NULL_TREE;
  else
    switch (fn)
      {
      case BUILT_IN_SQRTF:
        return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];

      default:
        return NULL_TREE;
      }
}

/* Newton-Raphson approximation of single-precision floating point divide n/d.
   Assumes no trapping math and finite arguments.  */

void
rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
{
  rtx x0, e0, e1, y1, u0, v0, one;

  x0 = gen_reg_rtx (SFmode);
  e0 = gen_reg_rtx (SFmode);
  e1 = gen_reg_rtx (SFmode);
  y1 = gen_reg_rtx (SFmode);
  u0 = gen_reg_rtx (SFmode);
  v0 = gen_reg_rtx (SFmode);
  one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));

  /* x0 = 1./d estimate */
  emit_insn (gen_rtx_SET (VOIDmode, x0,
                          gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
                                          UNSPEC_FRES)));
  /* e0 = 1. - d * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, e0,
                          gen_rtx_MINUS (SFmode, one,
                                         gen_rtx_MULT (SFmode, d, x0))));
  /* e1 = e0 + e0 * e0 */
  emit_insn (gen_rtx_SET (VOIDmode, e1,
                          gen_rtx_PLUS (SFmode,
                                        gen_rtx_MULT (SFmode, e0, e0), e0)));
  /* y1 = x0 + e1 * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, y1,
                          gen_rtx_PLUS (SFmode,
                                        gen_rtx_MULT (SFmode, e1, x0), x0)));
  /* u0 = n * y1 */
  emit_insn (gen_rtx_SET (VOIDmode, u0,
                          gen_rtx_MULT (SFmode, n, y1)));
  /* v0 = n - d * u0 */
  emit_insn (gen_rtx_SET (VOIDmode, v0,
                          gen_rtx_MINUS (SFmode, n,
                                         gen_rtx_MULT (SFmode, d, u0))));
  /* dst = u0 + v0 * y1 */
  emit_insn (gen_rtx_SET (VOIDmode, dst,
                          gen_rtx_PLUS (SFmode,
                                        gen_rtx_MULT (SFmode, v0, y1), u0)));
}
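
/* Illustrative sketch (editorial addition, not part of the original file):
   the same refinement written as plain C, to show the data flow the RTL
   above creates.  reciprocal_estimate () is a hypothetical stand-in for
   the hardware "fres" result; in the real sequence each multiply-add
   below is expected to combine into a single fused multiply-add.  */
#if 0
static float
reciprocal_estimate (float d)
{
  return 1.0f / d;              /* stand-in for the low-precision fres */
}

static float
swdivsf_model (float n, float d)
{
  float x0 = reciprocal_estimate (d);   /* x0 ~= 1/d                  */
  float e0 = 1.0f - d * x0;             /* relative error of x0       */
  float e1 = e0 + e0 * e0;              /* two correction terms       */
  float y1 = x0 + e1 * x0;              /* refined reciprocal         */
  float u0 = n * y1;                    /* first quotient estimate    */
  float v0 = n - d * u0;                /* residual of that estimate  */
  return u0 + v0 * y1;                  /* corrected quotient         */
}
#endif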

/* Newton-Raphson approximation of double-precision floating point divide n/d.
   Assumes no trapping math and finite arguments.  */

void
rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
{
  rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;

  x0 = gen_reg_rtx (DFmode);
  e0 = gen_reg_rtx (DFmode);
  e1 = gen_reg_rtx (DFmode);
  e2 = gen_reg_rtx (DFmode);
  y1 = gen_reg_rtx (DFmode);
  y2 = gen_reg_rtx (DFmode);
  y3 = gen_reg_rtx (DFmode);
  u0 = gen_reg_rtx (DFmode);
  v0 = gen_reg_rtx (DFmode);
  one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));

  /* x0 = 1./d estimate */
  emit_insn (gen_rtx_SET (VOIDmode, x0,
                          gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
                                          UNSPEC_FRES)));
  /* e0 = 1. - d * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, e0,
                          gen_rtx_MINUS (DFmode, one,
                                         gen_rtx_MULT (DFmode, d, x0))));
  /* y1 = x0 + e0 * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, y1,
                          gen_rtx_PLUS (DFmode,
                                        gen_rtx_MULT (DFmode, e0, x0), x0)));
  /* e1 = e0 * e0 */
  emit_insn (gen_rtx_SET (VOIDmode, e1,
                          gen_rtx_MULT (DFmode, e0, e0)));
  /* y2 = y1 + e1 * y1 */
  emit_insn (gen_rtx_SET (VOIDmode, y2,
                          gen_rtx_PLUS (DFmode,
                                        gen_rtx_MULT (DFmode, e1, y1), y1)));
  /* e2 = e1 * e1 */
  emit_insn (gen_rtx_SET (VOIDmode, e2,
                          gen_rtx_MULT (DFmode, e1, e1)));
  /* y3 = y2 + e2 * y2 */
  emit_insn (gen_rtx_SET (VOIDmode, y3,
                          gen_rtx_PLUS (DFmode,
                                        gen_rtx_MULT (DFmode, e2, y2), y2)));
  /* u0 = n * y3 */
  emit_insn (gen_rtx_SET (VOIDmode, u0,
                          gen_rtx_MULT (DFmode, n, y3)));
  /* v0 = n - d * u0 */
  emit_insn (gen_rtx_SET (VOIDmode, v0,
                          gen_rtx_MINUS (DFmode, n,
                                         gen_rtx_MULT (DFmode, d, u0))));
  /* dst = u0 + v0 * y3 */
  emit_insn (gen_rtx_SET (VOIDmode, dst,
                          gen_rtx_PLUS (DFmode,
                                        gen_rtx_MULT (DFmode, v0, y3), u0)));
}
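
/* Convergence note (editorial addition, not part of the original file):
   if the initial estimate satisfies x0 = (1 - e) / d, then
   e0 = 1 - d * x0 = e, and each refinement multiplies the reciprocal by
   (1 + error), so y1 has error e^2, y2 has error e^4 and y3 has error
   e^8.  With the hardware estimate accurate to roughly 2^-8, three
   steps reach about 2^-64, below double precision (2^-53).  The
   single-precision routine above instead folds two terms into one step
   (e1 = e0 + e0^2), giving error e^3, roughly 2^-24, enough for
   float.  */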

/* Newton-Raphson approximation of single-precision floating point rsqrt.
   Assumes no trapping math and finite arguments.  */

void
rs6000_emit_swrsqrtsf (rtx dst, rtx src)
{
  rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
    half, one, halfthree, c1, cond, label;

  x0 = gen_reg_rtx (SFmode);
  x1 = gen_reg_rtx (SFmode);
  x2 = gen_reg_rtx (SFmode);
  y1 = gen_reg_rtx (SFmode);
  u0 = gen_reg_rtx (SFmode);
  u1 = gen_reg_rtx (SFmode);
  u2 = gen_reg_rtx (SFmode);
  v0 = gen_reg_rtx (SFmode);
  v1 = gen_reg_rtx (SFmode);
  v2 = gen_reg_rtx (SFmode);
  t0 = gen_reg_rtx (SFmode);
  halfthree = gen_reg_rtx (SFmode);
  cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
  label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());

  /* check 0.0, 1.0, NaN, Inf by testing src * src = src */
  emit_insn (gen_rtx_SET (VOIDmode, t0,
                          gen_rtx_MULT (SFmode, src, src)));

  emit_insn (gen_rtx_SET (VOIDmode, cond,
                          gen_rtx_COMPARE (CCFPmode, t0, src)));
  c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (c1, label);

  half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
  one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));

  /* halfthree = 1.5 = 1.0 + 0.5 */
  emit_insn (gen_rtx_SET (VOIDmode, halfthree,
                          gen_rtx_PLUS (SFmode, one, half)));

  /* x0 = rsqrt estimate */
  emit_insn (gen_rtx_SET (VOIDmode, x0,
                          gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
                                          UNSPEC_RSQRT)));

  /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
  emit_insn (gen_rtx_SET (VOIDmode, y1,
                          gen_rtx_MINUS (SFmode,
                                         gen_rtx_MULT (SFmode, src, halfthree),
                                         src)));

  /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
  emit_insn (gen_rtx_SET (VOIDmode, u0,
                          gen_rtx_MULT (SFmode, x0, x0)));
  emit_insn (gen_rtx_SET (VOIDmode, v0,
                          gen_rtx_MINUS (SFmode,
                                         halfthree,
                                         gen_rtx_MULT (SFmode, y1, u0))));
  emit_insn (gen_rtx_SET (VOIDmode, x1,
                          gen_rtx_MULT (SFmode, x0, v0)));

  /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
  emit_insn (gen_rtx_SET (VOIDmode, u1,
                          gen_rtx_MULT (SFmode, x1, x1)));
  emit_insn (gen_rtx_SET (VOIDmode, v1,
                          gen_rtx_MINUS (SFmode,
                                         halfthree,
                                         gen_rtx_MULT (SFmode, y1, u1))));
  emit_insn (gen_rtx_SET (VOIDmode, x2,
                          gen_rtx_MULT (SFmode, x1, v1)));

  /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
  emit_insn (gen_rtx_SET (VOIDmode, u2,
                          gen_rtx_MULT (SFmode, x2, x2)));
  emit_insn (gen_rtx_SET (VOIDmode, v2,
                          gen_rtx_MINUS (SFmode,
                                         halfthree,
                                         gen_rtx_MULT (SFmode, y1, u2))));
  emit_insn (gen_rtx_SET (VOIDmode, dst,
                          gen_rtx_MULT (SFmode, x2, v2)));

  emit_label (XEXP (label, 0));
}
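
/* Illustrative sketch (editorial addition, not part of the original file):
   the iteration above in plain C.  rsqrt_estimate () is a hypothetical
   stand-in for the frsqrte result, and <math.h> is assumed only for that
   stand-in; the early-exit check for 0.0, 1.0, NaN and Inf is omitted.  */
#if 0
#include <math.h>

static float
rsqrt_estimate (float x)
{
  return 1.0f / sqrtf (x);      /* stand-in for the hardware estimate */
}

static float
swrsqrtsf_model (float src)
{
  float halfthree = 1.5f;
  float y1 = src * halfthree - src;     /* 0.5 * src, saving a constant */
  float x = rsqrt_estimate (src);
  int i;

  /* Three Newton steps: x <- x * (1.5 - 0.5 * src * x * x).  */
  for (i = 0; i < 3; i++)
    x = x * (halfthree - y1 * (x * x));
  return x;
}
#endif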

/* Emit popcount intrinsic on TARGET_POPCNTB targets.  DST is the
   target, and SRC is the argument operand.  */

void
rs6000_emit_popcount (rtx dst, rtx src)
{
  enum machine_mode mode = GET_MODE (dst);
  rtx tmp1, tmp2;

  tmp1 = gen_reg_rtx (mode);

  if (mode == SImode)
    {
      emit_insn (gen_popcntbsi2 (tmp1, src));
      tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
                          NULL_RTX, 0);
      tmp2 = force_reg (SImode, tmp2);
      emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
    }
  else
    {
      emit_insn (gen_popcntbdi2 (tmp1, src));
      tmp2 = expand_mult (DImode, tmp1,
                          GEN_INT ((HOST_WIDE_INT)
                                   0x01010101 << 32 | 0x01010101),
                          NULL_RTX, 0);
      tmp2 = force_reg (DImode, tmp2);
      emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
    }
}
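
/* Illustrative sketch (editorial addition, not part of the original file):
   the SImode sequence above in plain C.  popcntb_model () mimics the
   popcntb instruction, which replaces each byte with the number of set
   bits in that byte; the multiply by 0x01010101 then sums the four byte
   counts into the top byte, and the shift by 24 extracts the total.  */
#if 0
static unsigned int
popcntb_model (unsigned int x)
{
  unsigned int result = 0;
  int byte;

  for (byte = 0; byte < 4; byte++)
    {
      unsigned int b = (x >> (8 * byte)) & 0xff;
      unsigned int count = 0;
      while (b)
        {
          count += b & 1;
          b >>= 1;
        }
      result |= count << (8 * byte);
    }
  return result;
}

static unsigned int
popcount_si_model (unsigned int x)
{
  return (popcntb_model (x) * 0x01010101u) >> 24;
}
#endif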

/* Emit parity intrinsic on TARGET_POPCNTB targets.  DST is the
   target, and SRC is the argument operand.  */

void
rs6000_emit_parity (rtx dst, rtx src)
{
  enum machine_mode mode = GET_MODE (dst);
  rtx tmp;

  tmp = gen_reg_rtx (mode);
  if (mode == SImode)
    {
      /* Is mult+shift >= shift+xor+shift+xor?  */
      if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
        {
          rtx tmp1, tmp2, tmp3, tmp4;

          tmp1 = gen_reg_rtx (SImode);
          emit_insn (gen_popcntbsi2 (tmp1, src));

          tmp2 = gen_reg_rtx (SImode);
          emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
          tmp3 = gen_reg_rtx (SImode);
          emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));

          tmp4 = gen_reg_rtx (SImode);
          emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
          emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
        }
      else
        rs6000_emit_popcount (tmp, src);
      emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
    }
  else
    {
      /* Is mult+shift >= shift+xor+shift+xor+shift+xor?  */
      if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
        {
          rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;

          tmp1 = gen_reg_rtx (DImode);
          emit_insn (gen_popcntbdi2 (tmp1, src));

          tmp2 = gen_reg_rtx (DImode);
          emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
          tmp3 = gen_reg_rtx (DImode);
          emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));

          tmp4 = gen_reg_rtx (DImode);
          emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
          tmp5 = gen_reg_rtx (DImode);
          emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));

          tmp6 = gen_reg_rtx (DImode);
          emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
          emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
        }
      else
        rs6000_emit_popcount (tmp, src);
      emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
    }
}
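
/* Worked example (editorial note, not part of the original file):
   for src = 0xF0F00001 the per-byte popcounts are 04 04 00 01, so the
   popcntb result is 0x04040001.  XORing with itself shifted right 16
   gives 0x04040405, XORing that with itself shifted right 8 leaves
   04 ^ 05 = 01 in the low byte, and the final AND with 1 yields parity
   1, matching the nine set bits in src.  */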

/* Return an RTX representing where to find the function value of a
   function returning MODE.  */
static rtx
rs6000_complex_function_value (enum machine_mode mode)
{
  unsigned int regno;
  rtx r1, r2;
  enum machine_mode inner = GET_MODE_INNER (mode);
  unsigned int inner_bytes = GET_MODE_SIZE (inner);

  if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else
    {
      regno = GP_ARG_RETURN;

      /* 32-bit is OK since it'll go in r3/r4.  */
      if (TARGET_32BIT && inner_bytes >= 4)
        return gen_rtx_REG (mode, regno);
    }

  if (inner_bytes >= 8)
    return gen_rtx_REG (mode, regno);

  r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
                          const0_rtx);
  r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
                          GEN_INT (inner_bytes));
  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
}
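
/* Worked example (editorial note, not part of the original file):
   for SCmode (complex float) with hardware floating point, the result
   goes to FP_ARG_RETURN, and since each SFmode part is 4 bytes the
   function builds a PARALLEL of two SFmode registers: the real part at
   offset 0 and the imaginary part in the next FP register at offset 4.
   A DCmode (complex double) value has 8-byte parts, so it is returned
   as a single DCmode register starting at FP_ARG_RETURN.  */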

/* Define how to find the value returned by a function.
   VALTYPE is the data type of the value (as a tree).
   If the precise function being called is known, FUNC is its FUNCTION_DECL;
   otherwise, FUNC is 0.

   On the SPE, both FPs and vectors are returned in r3.

   On RS/6000 an integer value is in r3 and a floating-point value is in
   fp1, unless -msoft-float.  */

rtx
rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
{
  enum machine_mode mode;
  unsigned int regno;

  /* Special handling for structs in darwin64.  */
  if (rs6000_darwin64_abi
      && TYPE_MODE (valtype) == BLKmode
      && TREE_CODE (valtype) == RECORD_TYPE
      && int_size_in_bytes (valtype) > 0)
    {
      CUMULATIVE_ARGS valcum;
      rtx valret;

      valcum.words = 0;
      valcum.fregno = FP_ARG_MIN_REG;
      valcum.vregno = ALTIVEC_ARG_MIN_REG;
      /* Do a trial code generation as if this were going to be passed as
         an argument; if any part goes in memory, we return NULL.  */
      valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
      if (valret)
        return valret;
      /* Otherwise fall through to standard ABI rules.  */
    }

  if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
    {
      /* A long long return value needs to be split in the -mpowerpc64,
         32-bit ABI.  */
      return gen_rtx_PARALLEL (DImode,
                               gen_rtvec (2,
                                          gen_rtx_EXPR_LIST (VOIDmode,
                                                             gen_rtx_REG (SImode, GP_ARG_RETURN),
                                                             const0_rtx),
                                          gen_rtx_EXPR_LIST (VOIDmode,
                                                             gen_rtx_REG (SImode,
                                                                          GP_ARG_RETURN + 1),
                                                             GEN_INT (4))));
    }
  if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
    {
      return gen_rtx_PARALLEL (DCmode,
                               gen_rtvec (4,
                                          gen_rtx_EXPR_LIST (VOIDmode,
                                                             gen_rtx_REG (SImode, GP_ARG_RETURN),
                                                             const0_rtx),
                                          gen_rtx_EXPR_LIST (VOIDmode,
                                                             gen_rtx_REG (SImode,
                                                                          GP_ARG_RETURN + 1),
                                                             GEN_INT (4)),
                                          gen_rtx_EXPR_LIST (VOIDmode,
                                                             gen_rtx_REG (SImode,
                                                                          GP_ARG_RETURN + 2),
                                                             GEN_INT (8)),
                                          gen_rtx_EXPR_LIST (VOIDmode,
                                                             gen_rtx_REG (SImode,
                                                                          GP_ARG_RETURN + 3),
                                                             GEN_INT (12))));
    }

  mode = TYPE_MODE (valtype);
  if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
      || POINTER_TYPE_P (valtype))
    mode = TARGET_32BIT ? SImode : DImode;

  if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
    /* _Decimal128 must use an even/odd register pair.  */
    regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
  else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else if (TREE_CODE (valtype) == COMPLEX_TYPE
           && targetm.calls.split_complex_arg)
    return rs6000_complex_function_value (mode);
  else if (TREE_CODE (valtype) == VECTOR_TYPE
           && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
           && ALTIVEC_VECTOR_MODE (mode))
    regno = ALTIVEC_ARG_RETURN;
  else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
           && (mode == DFmode || mode == DCmode
               || mode == TFmode || mode == TCmode))
    return spe_build_register_parallel (mode, GP_ARG_RETURN);
  else
    regno = GP_ARG_RETURN;

  return gen_rtx_REG (mode, regno);
}

/* Define how to find the value returned by a library function
   assuming the value has mode MODE.  */
rtx
rs6000_libcall_value (enum machine_mode mode)
{
  unsigned int regno;

  if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
    {
      /* A long long return value needs to be split in the -mpowerpc64,
         32-bit ABI.  */
      return gen_rtx_PARALLEL (DImode,
                               gen_rtvec (2,
                                          gen_rtx_EXPR_LIST (VOIDmode,
                                                             gen_rtx_REG (SImode, GP_ARG_RETURN),
                                                             const0_rtx),
                                          gen_rtx_EXPR_LIST (VOIDmode,
                                                             gen_rtx_REG (SImode,
                                                                          GP_ARG_RETURN + 1),
                                                             GEN_INT (4))));
    }

  if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
    /* _Decimal128 must use an even/odd register pair.  */
    regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
  else if (SCALAR_FLOAT_MODE_P (mode)
           && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else if (ALTIVEC_VECTOR_MODE (mode)
           && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
    regno = ALTIVEC_ARG_RETURN;
  else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
    return rs6000_complex_function_value (mode);
  else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
           && (mode == DFmode || mode == DCmode
               || mode == TFmode || mode == TCmode))
    return spe_build_register_parallel (mode, GP_ARG_RETURN);
  else
    regno = GP_ARG_RETURN;

  return gen_rtx_REG (mode, regno);
}

/* Define the offset between two registers, FROM to be eliminated and its
   replacement TO, at the start of a routine.  */
HOST_WIDE_INT
rs6000_initial_elimination_offset (int from, int to)
{
  rs6000_stack_t *info = rs6000_stack_info ();
  HOST_WIDE_INT offset;

  if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    offset = info->push_p ? 0 : -info->total_size;
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      offset = info->push_p ? 0 : -info->total_size;
      if (FRAME_GROWS_DOWNWARD)
        offset += info->fixed_size + info->vars_size + info->parm_size;
    }
  else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    offset = FRAME_GROWS_DOWNWARD
             ? info->fixed_size + info->vars_size + info->parm_size
             : 0;
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    offset = info->total_size;
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    offset = info->push_p ? info->total_size : 0;
  else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
    offset = 0;
  else
    gcc_unreachable ();

  return offset;
}

/* Return true if TYPE is a SPE or AltiVec opaque type.  */

static bool
rs6000_is_opaque_type (const_tree type)
{
  return (type == opaque_V2SI_type_node
          || type == opaque_V2SF_type_node
          || type == opaque_V4SI_type_node);
}

static rtx
rs6000_dwarf_register_span (rtx reg)
{
  unsigned regno;

  if (TARGET_SPE
      && (SPE_VECTOR_MODE (GET_MODE (reg))
          || (TARGET_E500_DOUBLE
              && (GET_MODE (reg) == DFmode || GET_MODE (reg) == DDmode))))
    ;
  else
    return NULL_RTX;

  regno = REGNO (reg);

  /* The duality of the SPE register size wreaks all kinds of havoc.
     This is a way of distinguishing r0 in 32-bits from r0 in
     64-bits.  */
  return
    gen_rtx_PARALLEL (VOIDmode,
                      BYTES_BIG_ENDIAN
                      ? gen_rtvec (2,
                                   gen_rtx_REG (SImode, regno + 1200),
                                   gen_rtx_REG (SImode, regno))
                      : gen_rtvec (2,
                                   gen_rtx_REG (SImode, regno),
                                   gen_rtx_REG (SImode, regno + 1200)));
}

/* Fill in sizes for SPE register high parts in table used by unwinder.  */

static void
rs6000_init_dwarf_reg_sizes_extra (tree address)
{
  if (TARGET_SPE)
    {
      int i;
      enum machine_mode mode = TYPE_MODE (char_type_node);
      rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
      rtx mem = gen_rtx_MEM (BLKmode, addr);
      rtx value = gen_int_mode (4, mode);

      for (i = 1201; i < 1232; i++)
        {
          int column = DWARF_REG_TO_UNWIND_COLUMN (i);
          HOST_WIDE_INT offset
            = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);

          emit_move_insn (adjust_address (mem, mode, offset), value);
        }
    }
}

/* Map internal gcc register numbers to DWARF2 register numbers.  */

unsigned int
rs6000_dbx_register_number (unsigned int regno)
{
  if (regno <= 63 || write_symbols != DWARF2_DEBUG)
    return regno;
  if (regno == MQ_REGNO)
    return 100;
  if (regno == LR_REGNO)
    return 108;
  if (regno == CTR_REGNO)
    return 109;
  if (CR_REGNO_P (regno))
    return regno - CR0_REGNO + 86;
  if (regno == XER_REGNO)
    return 101;
  if (ALTIVEC_REGNO_P (regno))
    return regno - FIRST_ALTIVEC_REGNO + 1124;
  if (regno == VRSAVE_REGNO)
    return 356;
  if (regno == VSCR_REGNO)
    return 67;
  if (regno == SPE_ACC_REGNO)
    return 99;
  if (regno == SPEFSCR_REGNO)
    return 612;
  /* SPE high reg number.  We get these values of regno from
     rs6000_dwarf_register_span.  */
  gcc_assert (regno >= 1200 && regno < 1232);
  return regno;
}

/* target hook eh_return_filter_mode */
static enum machine_mode
rs6000_eh_return_filter_mode (void)
{
  return TARGET_32BIT ? SImode : word_mode;
}

/* Target hook for scalar_mode_supported_p.  */
static bool
rs6000_scalar_mode_supported_p (enum machine_mode mode)
{
  if (DECIMAL_FLOAT_MODE_P (mode))
    return true;
  else
    return default_scalar_mode_supported_p (mode);
}

/* Target hook for vector_mode_supported_p.  */
static bool
rs6000_vector_mode_supported_p (enum machine_mode mode)
{
  if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
    return true;

  if (TARGET_SPE && SPE_VECTOR_MODE (mode))
    return true;

  else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
    return true;

  else
    return false;
}

/* Target hook for invalid_arg_for_unprototyped_fn.  */
static const char *
invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
{
  return (!rs6000_darwin64_abi
          && typelist == 0
          && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
          && (funcdecl == NULL_TREE
              || (TREE_CODE (funcdecl) == FUNCTION_DECL
                  && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
         ? N_("AltiVec argument passed to unprototyped function")
         : NULL;
}

/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
   setup by using __stack_chk_fail_local hidden function instead of
   calling __stack_chk_fail directly.  Otherwise it is better to call
   __stack_chk_fail directly.  */

static tree
rs6000_stack_protect_fail (void)
{
  return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
         ? default_hidden_stack_protect_fail ()
         : default_external_stack_protect_fail ();
}

#include "gt-rs6000.h"