9878760c 1/* Subroutines used for code generation on IBM RS/6000.
f676971a 2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
8ef65e3d 3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
337bde91 4 Free Software Foundation, Inc.
fab3bcc3 5 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
9878760c 6
5de601cf 7 This file is part of GCC.
9878760c 8
5de601cf
NC
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
2f83c7d6 11 by the Free Software Foundation; either version 3, or (at your
5de601cf 12 option) any later version.
9878760c 13
5de601cf
NC
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
9878760c 18
5de601cf 19 You should have received a copy of the GNU General Public License
2f83c7d6
NC
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
9878760c 22
956d6950 23#include "config.h"
c4d38ccb 24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
9878760c
RK
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
9878760c
RK
33#include "insn-attr.h"
34#include "flags.h"
35#include "recog.h"
9878760c 36#include "obstack.h"
9b30bae2 37#include "tree.h"
dfafc897 38#include "expr.h"
2fc1c679 39#include "optabs.h"
2a430ec1 40#include "except.h"
a7df97e6 41#include "function.h"
296b8152 42#include "output.h"
d5fa86ba 43#include "basic-block.h"
d0101753 44#include "integrate.h"
296b8152 45#include "toplev.h"
c8023011 46#include "ggc.h"
9ebbca7d
GK
47#include "hashtab.h"
48#include "tm_p.h"
672a6f42
NB
49#include "target.h"
50#include "target-def.h"
3ac88239 51#include "langhooks.h"
24ea750e 52#include "reload.h"
117dca74 53#include "cfglayout.h"
79ae11c4 54#include "sched-int.h"
cd3ce9b4 55#include "tree-gimple.h"
e41b2a33 56#include "tree-flow.h"
4d3e6fae 57#include "intl.h"
59d6560b 58#include "params.h"
279bb624 59#include "tm-constrs.h"
1bc7c5b6
ZW
60#if TARGET_XCOFF
61#include "xcoffout.h" /* get declarations of xcoff_*_section_name */
62#endif
93a27b7b
ZW
63#if TARGET_MACHO
64#include "gstab.h" /* for N_SLINE */
65#endif
9b30bae2 66
7509c759
MM
67#ifndef TARGET_NO_PROTOTYPE
68#define TARGET_NO_PROTOTYPE 0
69#endif
70
9878760c
RK
71#define min(A,B) ((A) < (B) ? (A) : (B))
72#define max(A,B) ((A) > (B) ? (A) : (B))
73
d1d0c603
JJ
74/* Structure used to define the rs6000 stack */
75typedef struct rs6000_stack {
76 int first_gp_reg_save; /* first callee saved GP register used */
77 int first_fp_reg_save; /* first callee saved FP register used */
78 int first_altivec_reg_save; /* first callee saved AltiVec register used */
79 int lr_save_p; /* true if the link reg needs to be saved */
80 int cr_save_p; /* true if the CR reg needs to be saved */
81 unsigned int vrsave_mask; /* mask of vec registers to save */
d1d0c603
JJ
82 int push_p; /* true if we need to allocate stack space */
83 int calls_p; /* true if the function makes any calls */
c4ad648e 84 int world_save_p; /* true if we're saving *everything*:
d62294f5 85 r13-r31, cr, f14-f31, vrsave, v20-v31 */
d1d0c603
JJ
86 enum rs6000_abi abi; /* which ABI to use */
87 int gp_save_offset; /* offset to save GP regs from initial SP */
88 int fp_save_offset; /* offset to save FP regs from initial SP */
89 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
90 int lr_save_offset; /* offset to save LR from initial SP */
91 int cr_save_offset; /* offset to save CR from initial SP */
92 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
93 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
d1d0c603
JJ
94 int varargs_save_offset; /* offset to save the varargs registers */
95 int ehrd_offset; /* offset to EH return data */
96 int reg_size; /* register size (4 or 8) */
d1d0c603
JJ
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
d1d0c603
JJ
105 int vrsave_size; /* size to hold VRSAVE if not in save_size */
106 int altivec_padding_size; /* size of altivec alignment padding if
107 not in save_size */
108 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
 109 int spe_padding_size; /* size of SPE alignment padding */
d1d0c603
JJ
110 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
111 int spe_64bit_regs_used;
112} rs6000_stack_t;
113
5b667039
JJ
114/* A C structure for machine-specific, per-function data.
115 This is added to the cfun structure. */
116typedef struct machine_function GTY(())
117{
118 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
119 int ra_needs_full_frame;
120 /* Some local-dynamic symbol. */
121 const char *some_ld_name;
122 /* Whether the instruction chain has been scanned already. */
123 int insn_chain_scanned_p;
124 /* Flags if __builtin_return_address (0) was used. */
125 int ra_need_lr;
126 /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
127 varargs save area. */
128 HOST_WIDE_INT varargs_save_offset;
e41b2a33
PB
129 /* Temporary stack slot to use for SDmode copies. This slot is
130 64-bits wide and is allocated early enough so that the offset
131 does not overflow the 16-bit load/store offset field. */
132 rtx sdmode_stack_slot;
5b667039
JJ
133} machine_function;
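/* A minimal note on usage, as the surrounding code suggests: a fresh
   machine_function record is set up per function by
   rs6000_init_machine_status (declared below) and is typically reached
   through cfun->machine.  */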
134
5248c961
RK
135/* Target cpu type */
136
137enum processor_type rs6000_cpu;
8e3f41e7
MM
138struct rs6000_cpu_select rs6000_select[3] =
139{
815cdc52
MM
140 /* switch name, tune arch */
141 { (const char *)0, "--with-cpu=", 1, 1 },
142 { (const char *)0, "-mcpu=", 1, 1 },
143 { (const char *)0, "-mtune=", 1, 0 },
8e3f41e7 144};
5248c961 145
d296e02e
AP
146static GTY(()) bool rs6000_cell_dont_microcode;
147
ec507f2d
DE
148/* Always emit branch hint bits. */
149static GTY(()) bool rs6000_always_hint;
150
151/* Schedule instructions for group formation. */
152static GTY(()) bool rs6000_sched_groups;
153
44cd321e
PS
154/* Align branch targets. */
155static GTY(()) bool rs6000_align_branch_targets;
156
569fa502
DN
157/* Support for -msched-costly-dep option. */
158const char *rs6000_sched_costly_dep_str;
159enum rs6000_dependence_cost rs6000_sched_costly_dep;
160
cbe26ab8
DN
161/* Support for -minsert-sched-nops option. */
162const char *rs6000_sched_insert_nops_str;
163enum rs6000_nop_insertion rs6000_sched_insert_nops;
164
7ccf35ed 165/* Support targetm.vectorize.builtin_mask_for_load. */
13c62176 166static GTY(()) tree altivec_builtin_mask_for_load;
7ccf35ed 167
602ea4d3 168/* Size of long double. */
6fa3f289
ZW
169int rs6000_long_double_type_size;
170
602ea4d3
JJ
171/* IEEE quad extended precision long double. */
172int rs6000_ieeequad;
173
a2db2771 174/* Nonzero to use AltiVec ABI. */
6fa3f289
ZW
175int rs6000_altivec_abi;
176
a3170dc6
AH
177/* Nonzero if we want SPE ABI extensions. */
178int rs6000_spe_abi;
179
5da702b1
AH
180/* Nonzero if floating point operations are done in the GPRs. */
181int rs6000_float_gprs = 0;
182
594a51fe
SS
183/* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
184int rs6000_darwin64_abi;
185
a0ab749a 186/* Set to nonzero once AIX common-mode calls have been defined. */
bbfb86aa 187static GTY(()) int common_mode_defined;
c81bebd7 188
9878760c
RK
189/* Save information from a "cmpxx" operation until the branch or scc is
190 emitted. */
9878760c
RK
191rtx rs6000_compare_op0, rs6000_compare_op1;
192int rs6000_compare_fp_p;
874a0744 193
874a0744
MM
 194/* Label number of the label created for -mrelocatable; we call it so we
 195 can get the address of the GOT section. */
196int rs6000_pic_labelno;
c81bebd7 197
b91da81f 198#ifdef USING_ELFOS_H
c81bebd7 199/* Which abi to adhere to */
9739c90c 200const char *rs6000_abi_name;
d9407988
MM
201
202/* Semantics of the small data area */
203enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
204
205/* Which small data model to use */
815cdc52 206const char *rs6000_sdata_name = (char *)0;
9ebbca7d
GK
207
208/* Counter for labels which are to be placed in .fixup. */
209int fixuplabelno = 0;
874a0744 210#endif
4697a36c 211
c4501e62
JJ
212/* Bit size of immediate TLS offsets and string from which it is decoded. */
213int rs6000_tls_size = 32;
214const char *rs6000_tls_size_string;
215
b6c9286a
MM
216/* ABI enumeration available for subtarget to use. */
217enum rs6000_abi rs6000_current_abi;
218
85b776df
AM
219/* Whether to use variant of AIX ABI for PowerPC64 Linux. */
220int dot_symbols;
221
38c1f2d7 222/* Debug flags */
815cdc52 223const char *rs6000_debug_name;
38c1f2d7
MM
224int rs6000_debug_stack; /* debug stack applications */
225int rs6000_debug_arg; /* debug argument handling */
226
aabcd309 227/* Value is TRUE if register/mode pair is acceptable. */
0d1fbc8c
AH
228bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
229
58646b77
PB
230/* Built in types. */
231
232tree rs6000_builtin_types[RS6000_BTI_MAX];
233tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
8bb418a3 234
57ac7be9
AM
235const char *rs6000_traceback_name;
236static enum {
237 traceback_default = 0,
238 traceback_none,
239 traceback_part,
240 traceback_full
241} rs6000_traceback;
242
38c1f2d7
MM
243/* Flag to say the TOC is initialized */
244int toc_initialized;
9ebbca7d 245char toc_label_name[10];
38c1f2d7 246
44cd321e
PS
247/* Cached value of rs6000_variable_issue. This is cached in
248 rs6000_variable_issue hook and returned from rs6000_sched_reorder2. */
249static short cached_can_issue_more;
250
d6b5193b
RS
251static GTY(()) section *read_only_data_section;
252static GTY(()) section *private_data_section;
253static GTY(()) section *read_only_private_data_section;
254static GTY(()) section *sdata2_section;
255static GTY(()) section *toc_section;
256
a3c9585f
KH
 257/* Control alignment for fields within structures. */
 258/* Flags decoded from -malign-XXXXX. */
025d9908
KH
259int rs6000_alignment_flags;
260
78f5898b
AH
261/* True for any options that were explicitly set. */
262struct {
df01da37 263 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
78f5898b 264 bool alignment; /* True if -malign- was used. */
a2db2771
JJ
265 bool spe_abi; /* True if -mabi=spe/no-spe was used. */
266 bool altivec_abi; /* True if -mabi=altivec/no-altivec used. */
78f5898b
AH
267 bool spe; /* True if -mspe= was used. */
268 bool float_gprs; /* True if -mfloat-gprs= was used. */
269 bool isel; /* True if -misel was used. */
270 bool long_double; /* True if -mlong-double- was used. */
d3603e8c 271 bool ieee; /* True if -mabi=ieee/ibmlongdouble used. */
a2db2771 272 bool vrsave; /* True if -mvrsave was used. */
78f5898b
AH
273} rs6000_explicit_options;
274
a3170dc6
AH
275struct builtin_description
276{
277 /* mask is not const because we're going to alter it below. This
278 nonsense will go away when we rewrite the -march infrastructure
279 to give us more target flag bits. */
280 unsigned int mask;
281 const enum insn_code icode;
282 const char *const name;
283 const enum rs6000_builtins code;
284};
8b897cfa
RS
285\f
286/* Target cpu costs. */
287
288struct processor_costs {
c4ad648e 289 const int mulsi; /* cost of SImode multiplication. */
8b897cfa
RS
290 const int mulsi_const; /* cost of SImode multiplication by constant. */
291 const int mulsi_const9; /* cost of SImode mult by short constant. */
c4ad648e
AM
292 const int muldi; /* cost of DImode multiplication. */
293 const int divsi; /* cost of SImode division. */
294 const int divdi; /* cost of DImode division. */
295 const int fp; /* cost of simple SFmode and DFmode insns. */
296 const int dmul; /* cost of DFmode multiplication (and fmadd). */
297 const int sdiv; /* cost of SFmode division (fdivs). */
298 const int ddiv; /* cost of DFmode division (fdiv). */
5f732aba
DE
299 const int cache_line_size; /* cache line size in bytes. */
300 const int l1_cache_size; /* size of l1 cache, in kilobytes. */
301 const int l2_cache_size; /* size of l2 cache, in kilobytes. */
0b11da67
DE
302 const int simultaneous_prefetches; /* number of parallel prefetch
303 operations. */
8b897cfa
RS
304};
305
306const struct processor_costs *rs6000_cost;
307
308/* Processor costs (relative to an add) */
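/* Each entry below is wrapped in COSTS_N_INSNS, so COSTS_N_INSNS (1) means
   "about as cheap as a single add"; for example, COSTS_N_INSNS (19) for
   divsi in the RIOS1 table models an SImode divide as roughly nineteen
   add-latencies.  */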
309
310/* Instruction size costs on 32bit processors. */
311static const
312struct processor_costs size32_cost = {
06a67bdd
RS
313 COSTS_N_INSNS (1), /* mulsi */
314 COSTS_N_INSNS (1), /* mulsi_const */
315 COSTS_N_INSNS (1), /* mulsi_const9 */
316 COSTS_N_INSNS (1), /* muldi */
317 COSTS_N_INSNS (1), /* divsi */
318 COSTS_N_INSNS (1), /* divdi */
319 COSTS_N_INSNS (1), /* fp */
320 COSTS_N_INSNS (1), /* dmul */
321 COSTS_N_INSNS (1), /* sdiv */
322 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
 323 32,    /* cache line size */
 324 0,    /* l1 cache */
 325 0,    /* l2 cache */
 5f732aba 326 0,    /* streams */
8b897cfa
RS
327};
328
329/* Instruction size costs on 64bit processors. */
330static const
331struct processor_costs size64_cost = {
06a67bdd
RS
332 COSTS_N_INSNS (1), /* mulsi */
333 COSTS_N_INSNS (1), /* mulsi_const */
334 COSTS_N_INSNS (1), /* mulsi_const9 */
335 COSTS_N_INSNS (1), /* muldi */
336 COSTS_N_INSNS (1), /* divsi */
337 COSTS_N_INSNS (1), /* divdi */
338 COSTS_N_INSNS (1), /* fp */
339 COSTS_N_INSNS (1), /* dmul */
340 COSTS_N_INSNS (1), /* sdiv */
341 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
 342 128,    /* cache line size */
 343 0,    /* l1 cache */
 344 0,    /* l2 cache */
 5f732aba 345 0,    /* streams */
8b897cfa
RS
346};
347
348/* Instruction costs on RIOS1 processors. */
349static const
350struct processor_costs rios1_cost = {
06a67bdd
RS
351 COSTS_N_INSNS (5), /* mulsi */
352 COSTS_N_INSNS (4), /* mulsi_const */
353 COSTS_N_INSNS (3), /* mulsi_const9 */
354 COSTS_N_INSNS (5), /* muldi */
355 COSTS_N_INSNS (19), /* divsi */
356 COSTS_N_INSNS (19), /* divdi */
357 COSTS_N_INSNS (2), /* fp */
358 COSTS_N_INSNS (2), /* dmul */
359 COSTS_N_INSNS (19), /* sdiv */
360 COSTS_N_INSNS (19), /* ddiv */
0d158b6e 361 128, /* cache line size */
5f732aba
DE
362 64, /* l1 cache */
363 512, /* l2 cache */
0b11da67 364 0, /* streams */
8b897cfa
RS
365};
366
367/* Instruction costs on RIOS2 processors. */
368static const
369struct processor_costs rios2_cost = {
06a67bdd
RS
370 COSTS_N_INSNS (2), /* mulsi */
371 COSTS_N_INSNS (2), /* mulsi_const */
372 COSTS_N_INSNS (2), /* mulsi_const9 */
373 COSTS_N_INSNS (2), /* muldi */
374 COSTS_N_INSNS (13), /* divsi */
375 COSTS_N_INSNS (13), /* divdi */
376 COSTS_N_INSNS (2), /* fp */
377 COSTS_N_INSNS (2), /* dmul */
378 COSTS_N_INSNS (17), /* sdiv */
379 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 380 256, /* cache line size */
5f732aba
DE
381 256, /* l1 cache */
382 1024, /* l2 cache */
0b11da67 383 0, /* streams */
8b897cfa
RS
384};
385
386/* Instruction costs on RS64A processors. */
387static const
388struct processor_costs rs64a_cost = {
06a67bdd
RS
389 COSTS_N_INSNS (20), /* mulsi */
390 COSTS_N_INSNS (12), /* mulsi_const */
391 COSTS_N_INSNS (8), /* mulsi_const9 */
392 COSTS_N_INSNS (34), /* muldi */
393 COSTS_N_INSNS (65), /* divsi */
394 COSTS_N_INSNS (67), /* divdi */
395 COSTS_N_INSNS (4), /* fp */
396 COSTS_N_INSNS (4), /* dmul */
397 COSTS_N_INSNS (31), /* sdiv */
398 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 399 128, /* cache line size */
5f732aba
DE
400 128, /* l1 cache */
401 2048, /* l2 cache */
0b11da67 402 1, /* streams */
8b897cfa
RS
403};
404
405/* Instruction costs on MPCCORE processors. */
406static const
407struct processor_costs mpccore_cost = {
06a67bdd
RS
408 COSTS_N_INSNS (2), /* mulsi */
409 COSTS_N_INSNS (2), /* mulsi_const */
410 COSTS_N_INSNS (2), /* mulsi_const9 */
411 COSTS_N_INSNS (2), /* muldi */
412 COSTS_N_INSNS (6), /* divsi */
413 COSTS_N_INSNS (6), /* divdi */
414 COSTS_N_INSNS (4), /* fp */
415 COSTS_N_INSNS (5), /* dmul */
416 COSTS_N_INSNS (10), /* sdiv */
417 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 418 32, /* cache line size */
5f732aba
DE
419 4, /* l1 cache */
420 16, /* l2 cache */
0b11da67 421 1, /* streams */
8b897cfa
RS
422};
423
424/* Instruction costs on PPC403 processors. */
425static const
426struct processor_costs ppc403_cost = {
06a67bdd
RS
427 COSTS_N_INSNS (4), /* mulsi */
428 COSTS_N_INSNS (4), /* mulsi_const */
429 COSTS_N_INSNS (4), /* mulsi_const9 */
430 COSTS_N_INSNS (4), /* muldi */
431 COSTS_N_INSNS (33), /* divsi */
432 COSTS_N_INSNS (33), /* divdi */
433 COSTS_N_INSNS (11), /* fp */
434 COSTS_N_INSNS (11), /* dmul */
435 COSTS_N_INSNS (11), /* sdiv */
436 COSTS_N_INSNS (11), /* ddiv */
0d158b6e 437 32, /* cache line size */
5f732aba
DE
438 4, /* l1 cache */
439 16, /* l2 cache */
0b11da67 440 1, /* streams */
8b897cfa
RS
441};
442
443/* Instruction costs on PPC405 processors. */
444static const
445struct processor_costs ppc405_cost = {
06a67bdd
RS
446 COSTS_N_INSNS (5), /* mulsi */
447 COSTS_N_INSNS (4), /* mulsi_const */
448 COSTS_N_INSNS (3), /* mulsi_const9 */
449 COSTS_N_INSNS (5), /* muldi */
450 COSTS_N_INSNS (35), /* divsi */
451 COSTS_N_INSNS (35), /* divdi */
452 COSTS_N_INSNS (11), /* fp */
453 COSTS_N_INSNS (11), /* dmul */
454 COSTS_N_INSNS (11), /* sdiv */
455 COSTS_N_INSNS (11), /* ddiv */
0d158b6e 456 32, /* cache line size */
5f732aba
DE
457 16, /* l1 cache */
458 128, /* l2 cache */
0b11da67 459 1, /* streams */
8b897cfa
RS
460};
461
462/* Instruction costs on PPC440 processors. */
463static const
464struct processor_costs ppc440_cost = {
06a67bdd
RS
465 COSTS_N_INSNS (3), /* mulsi */
466 COSTS_N_INSNS (2), /* mulsi_const */
467 COSTS_N_INSNS (2), /* mulsi_const9 */
468 COSTS_N_INSNS (3), /* muldi */
469 COSTS_N_INSNS (34), /* divsi */
470 COSTS_N_INSNS (34), /* divdi */
471 COSTS_N_INSNS (5), /* fp */
472 COSTS_N_INSNS (5), /* dmul */
473 COSTS_N_INSNS (19), /* sdiv */
474 COSTS_N_INSNS (33), /* ddiv */
0d158b6e 475 32, /* cache line size */
5f732aba
DE
476 32, /* l1 cache */
477 256, /* l2 cache */
0b11da67 478 1, /* streams */
8b897cfa
RS
479};
480
481/* Instruction costs on PPC601 processors. */
482static const
483struct processor_costs ppc601_cost = {
06a67bdd
RS
484 COSTS_N_INSNS (5), /* mulsi */
485 COSTS_N_INSNS (5), /* mulsi_const */
486 COSTS_N_INSNS (5), /* mulsi_const9 */
487 COSTS_N_INSNS (5), /* muldi */
488 COSTS_N_INSNS (36), /* divsi */
489 COSTS_N_INSNS (36), /* divdi */
490 COSTS_N_INSNS (4), /* fp */
491 COSTS_N_INSNS (5), /* dmul */
492 COSTS_N_INSNS (17), /* sdiv */
493 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 494 32, /* cache line size */
5f732aba
DE
495 32, /* l1 cache */
496 256, /* l2 cache */
0b11da67 497 1, /* streams */
8b897cfa
RS
498};
499
500/* Instruction costs on PPC603 processors. */
501static const
502struct processor_costs ppc603_cost = {
06a67bdd
RS
503 COSTS_N_INSNS (5), /* mulsi */
504 COSTS_N_INSNS (3), /* mulsi_const */
505 COSTS_N_INSNS (2), /* mulsi_const9 */
506 COSTS_N_INSNS (5), /* muldi */
507 COSTS_N_INSNS (37), /* divsi */
508 COSTS_N_INSNS (37), /* divdi */
509 COSTS_N_INSNS (3), /* fp */
510 COSTS_N_INSNS (4), /* dmul */
511 COSTS_N_INSNS (18), /* sdiv */
512 COSTS_N_INSNS (33), /* ddiv */
0d158b6e 513 32, /* cache line size */
5f732aba
DE
514 8, /* l1 cache */
515 64, /* l2 cache */
0b11da67 516 1, /* streams */
8b897cfa
RS
517};
518
519/* Instruction costs on PPC604 processors. */
520static const
521struct processor_costs ppc604_cost = {
06a67bdd
RS
522 COSTS_N_INSNS (4), /* mulsi */
523 COSTS_N_INSNS (4), /* mulsi_const */
524 COSTS_N_INSNS (4), /* mulsi_const9 */
525 COSTS_N_INSNS (4), /* muldi */
526 COSTS_N_INSNS (20), /* divsi */
527 COSTS_N_INSNS (20), /* divdi */
528 COSTS_N_INSNS (3), /* fp */
529 COSTS_N_INSNS (3), /* dmul */
530 COSTS_N_INSNS (18), /* sdiv */
531 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 532 32, /* cache line size */
5f732aba
DE
533 16, /* l1 cache */
534 512, /* l2 cache */
0b11da67 535 1, /* streams */
8b897cfa
RS
536};
537
538/* Instruction costs on PPC604e processors. */
539static const
540struct processor_costs ppc604e_cost = {
06a67bdd
RS
541 COSTS_N_INSNS (2), /* mulsi */
542 COSTS_N_INSNS (2), /* mulsi_const */
543 COSTS_N_INSNS (2), /* mulsi_const9 */
544 COSTS_N_INSNS (2), /* muldi */
545 COSTS_N_INSNS (20), /* divsi */
546 COSTS_N_INSNS (20), /* divdi */
547 COSTS_N_INSNS (3), /* fp */
548 COSTS_N_INSNS (3), /* dmul */
549 COSTS_N_INSNS (18), /* sdiv */
550 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 551 32, /* cache line size */
5f732aba
DE
552 32, /* l1 cache */
553 1024, /* l2 cache */
0b11da67 554 1, /* streams */
8b897cfa
RS
555};
556
f0517163 557/* Instruction costs on PPC620 processors. */
8b897cfa
RS
558static const
559struct processor_costs ppc620_cost = {
06a67bdd
RS
560 COSTS_N_INSNS (5), /* mulsi */
561 COSTS_N_INSNS (4), /* mulsi_const */
562 COSTS_N_INSNS (3), /* mulsi_const9 */
563 COSTS_N_INSNS (7), /* muldi */
564 COSTS_N_INSNS (21), /* divsi */
565 COSTS_N_INSNS (37), /* divdi */
566 COSTS_N_INSNS (3), /* fp */
567 COSTS_N_INSNS (3), /* dmul */
568 COSTS_N_INSNS (18), /* sdiv */
569 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 570 128, /* cache line size */
5f732aba
DE
571 32, /* l1 cache */
572 1024, /* l2 cache */
0b11da67 573 1, /* streams */
f0517163
RS
574};
575
576/* Instruction costs on PPC630 processors. */
577static const
578struct processor_costs ppc630_cost = {
06a67bdd
RS
579 COSTS_N_INSNS (5), /* mulsi */
580 COSTS_N_INSNS (4), /* mulsi_const */
581 COSTS_N_INSNS (3), /* mulsi_const9 */
582 COSTS_N_INSNS (7), /* muldi */
583 COSTS_N_INSNS (21), /* divsi */
584 COSTS_N_INSNS (37), /* divdi */
585 COSTS_N_INSNS (3), /* fp */
586 COSTS_N_INSNS (3), /* dmul */
587 COSTS_N_INSNS (17), /* sdiv */
588 COSTS_N_INSNS (21), /* ddiv */
0d158b6e 589 128, /* cache line size */
5f732aba
DE
590 64, /* l1 cache */
591 1024, /* l2 cache */
0b11da67 592 1, /* streams */
8b897cfa
RS
593};
594
d296e02e
AP
595/* Instruction costs on Cell processor. */
596/* COSTS_N_INSNS (1) ~ one add. */
597static const
598struct processor_costs ppccell_cost = {
599 COSTS_N_INSNS (9/2)+2, /* mulsi */
600 COSTS_N_INSNS (6/2), /* mulsi_const */
601 COSTS_N_INSNS (6/2), /* mulsi_const9 */
602 COSTS_N_INSNS (15/2)+2, /* muldi */
603 COSTS_N_INSNS (38/2), /* divsi */
604 COSTS_N_INSNS (70/2), /* divdi */
605 COSTS_N_INSNS (10/2), /* fp */
606 COSTS_N_INSNS (10/2), /* dmul */
607 COSTS_N_INSNS (74/2), /* sdiv */
608 COSTS_N_INSNS (74/2), /* ddiv */
0d158b6e 609 128, /* cache line size */
5f732aba
DE
610 32, /* l1 cache */
611 512, /* l2 cache */
612 6, /* streams */
d296e02e
AP
613};
614
8b897cfa
RS
615/* Instruction costs on PPC750 and PPC7400 processors. */
616static const
617struct processor_costs ppc750_cost = {
06a67bdd
RS
618 COSTS_N_INSNS (5), /* mulsi */
619 COSTS_N_INSNS (3), /* mulsi_const */
620 COSTS_N_INSNS (2), /* mulsi_const9 */
621 COSTS_N_INSNS (5), /* muldi */
622 COSTS_N_INSNS (17), /* divsi */
623 COSTS_N_INSNS (17), /* divdi */
624 COSTS_N_INSNS (3), /* fp */
625 COSTS_N_INSNS (3), /* dmul */
626 COSTS_N_INSNS (17), /* sdiv */
627 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 628 32, /* cache line size */
5f732aba
DE
629 32, /* l1 cache */
630 512, /* l2 cache */
0b11da67 631 1, /* streams */
8b897cfa
RS
632};
633
634/* Instruction costs on PPC7450 processors. */
635static const
636struct processor_costs ppc7450_cost = {
06a67bdd
RS
637 COSTS_N_INSNS (4), /* mulsi */
638 COSTS_N_INSNS (3), /* mulsi_const */
639 COSTS_N_INSNS (3), /* mulsi_const9 */
640 COSTS_N_INSNS (4), /* muldi */
641 COSTS_N_INSNS (23), /* divsi */
642 COSTS_N_INSNS (23), /* divdi */
643 COSTS_N_INSNS (5), /* fp */
644 COSTS_N_INSNS (5), /* dmul */
645 COSTS_N_INSNS (21), /* sdiv */
646 COSTS_N_INSNS (35), /* ddiv */
0d158b6e 647 32, /* cache line size */
5f732aba
DE
648 32, /* l1 cache */
649 1024, /* l2 cache */
0b11da67 650 1, /* streams */
8b897cfa 651};
a3170dc6 652
8b897cfa
RS
653/* Instruction costs on PPC8540 processors. */
654static const
655struct processor_costs ppc8540_cost = {
06a67bdd
RS
656 COSTS_N_INSNS (4), /* mulsi */
657 COSTS_N_INSNS (4), /* mulsi_const */
658 COSTS_N_INSNS (4), /* mulsi_const9 */
659 COSTS_N_INSNS (4), /* muldi */
660 COSTS_N_INSNS (19), /* divsi */
661 COSTS_N_INSNS (19), /* divdi */
662 COSTS_N_INSNS (4), /* fp */
663 COSTS_N_INSNS (4), /* dmul */
664 COSTS_N_INSNS (29), /* sdiv */
665 COSTS_N_INSNS (29), /* ddiv */
0d158b6e 666 32, /* cache line size */
5f732aba
DE
667 32, /* l1 cache */
668 256, /* l2 cache */
0b11da67 669 1, /* prefetch streams */
8b897cfa
RS
670};
671
fa41c305
EW
672/* Instruction costs on E300C2 and E300C3 cores. */
673static const
674struct processor_costs ppce300c2c3_cost = {
675 COSTS_N_INSNS (4), /* mulsi */
676 COSTS_N_INSNS (4), /* mulsi_const */
677 COSTS_N_INSNS (4), /* mulsi_const9 */
678 COSTS_N_INSNS (4), /* muldi */
679 COSTS_N_INSNS (19), /* divsi */
680 COSTS_N_INSNS (19), /* divdi */
681 COSTS_N_INSNS (3), /* fp */
682 COSTS_N_INSNS (4), /* dmul */
683 COSTS_N_INSNS (18), /* sdiv */
684 COSTS_N_INSNS (33), /* ddiv */
642639ce 685 32, /* cache line size */
a19b7d46
EW
686 16, /* l1 cache */
687 16, /* l2 cache */
642639ce 688 1, /* prefetch streams */
fa41c305
EW
689};
690
8b897cfa
RS
691/* Instruction costs on POWER4 and POWER5 processors. */
692static const
693struct processor_costs power4_cost = {
06a67bdd
RS
694 COSTS_N_INSNS (3), /* mulsi */
695 COSTS_N_INSNS (2), /* mulsi_const */
696 COSTS_N_INSNS (2), /* mulsi_const9 */
697 COSTS_N_INSNS (4), /* muldi */
698 COSTS_N_INSNS (18), /* divsi */
699 COSTS_N_INSNS (34), /* divdi */
700 COSTS_N_INSNS (3), /* fp */
701 COSTS_N_INSNS (3), /* dmul */
702 COSTS_N_INSNS (17), /* sdiv */
703 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 704 128, /* cache line size */
5f732aba
DE
705 32, /* l1 cache */
706 1024, /* l2 cache */
0b11da67 707 8, /* prefetch streams */
8b897cfa
RS
708};
709
44cd321e
PS
710/* Instruction costs on POWER6 processors. */
711static const
712struct processor_costs power6_cost = {
713 COSTS_N_INSNS (8), /* mulsi */
714 COSTS_N_INSNS (8), /* mulsi_const */
715 COSTS_N_INSNS (8), /* mulsi_const9 */
716 COSTS_N_INSNS (8), /* muldi */
717 COSTS_N_INSNS (22), /* divsi */
718 COSTS_N_INSNS (28), /* divdi */
719 COSTS_N_INSNS (3), /* fp */
720 COSTS_N_INSNS (3), /* dmul */
721 COSTS_N_INSNS (13), /* sdiv */
722 COSTS_N_INSNS (16), /* ddiv */
0d158b6e 723 128, /* cache line size */
5f732aba
DE
724 64, /* l1 cache */
725 2048, /* l2 cache */
0b11da67 726 16, /* prefetch streams */
44cd321e
PS
727};
728
8b897cfa 729\f
a2369ed3 730static bool rs6000_function_ok_for_sibcall (tree, tree);
3101faab 731static const char *rs6000_invalid_within_doloop (const_rtx);
a2369ed3 732static rtx rs6000_generate_compare (enum rtx_code);
a2369ed3
DJ
733static void rs6000_emit_stack_tie (void);
734static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
a2369ed3 735static bool spe_func_has_64bit_regs_p (void);
b20a9cca 736static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
d1d0c603 737 int, HOST_WIDE_INT);
a2369ed3
DJ
738static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
739static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
740static unsigned rs6000_hash_constant (rtx);
741static unsigned toc_hash_function (const void *);
742static int toc_hash_eq (const void *, const void *);
743static int constant_pool_expr_1 (rtx, int *, int *);
744static bool constant_pool_expr_p (rtx);
d04b6e6e 745static bool legitimate_small_data_p (enum machine_mode, rtx);
a2369ed3
DJ
746static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
747static struct machine_function * rs6000_init_machine_status (void);
748static bool rs6000_assemble_integer (rtx, unsigned int, int);
6d0a8091 749static bool no_global_regs_above (int);
5add3202 750#ifdef HAVE_GAS_HIDDEN
a2369ed3 751static void rs6000_assemble_visibility (tree, int);
5add3202 752#endif
a2369ed3
DJ
753static int rs6000_ra_ever_killed (void);
754static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
8bb418a3 755static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
3101faab 756static bool rs6000_ms_bitfield_layout_p (const_tree);
77ccdfed 757static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
76d2b81d 758static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
3101faab 759static const char *rs6000_mangle_type (const_tree);
b86fe7b4 760extern const struct attribute_spec rs6000_attribute_table[];
a2369ed3 761static void rs6000_set_default_type_attributes (tree);
52ff33d0 762static bool rs6000_reg_live_or_pic_offset_p (int);
a2369ed3
DJ
763static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
764static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
b20a9cca
AM
765static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
766 tree);
a2369ed3 767static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
586de218 768static bool rs6000_return_in_memory (const_tree, const_tree);
a2369ed3 769static void rs6000_file_start (void);
7c262518 770#if TARGET_ELF
9b580a0b 771static int rs6000_elf_reloc_rw_mask (void);
a2369ed3
DJ
772static void rs6000_elf_asm_out_constructor (rtx, int);
773static void rs6000_elf_asm_out_destructor (rtx, int);
1334b570 774static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
d6b5193b 775static void rs6000_elf_asm_init_sections (void);
d6b5193b
RS
776static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
777 unsigned HOST_WIDE_INT);
a56d7372 778static void rs6000_elf_encode_section_info (tree, rtx, int)
0e5dbd9b 779 ATTRIBUTE_UNUSED;
7c262518 780#endif
3101faab 781static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
e41b2a33
PB
782static void rs6000_alloc_sdmode_stack_slot (void);
783static void rs6000_instantiate_decls (void);
cbaaba19 784#if TARGET_XCOFF
0d5817b2 785static void rs6000_xcoff_asm_output_anchor (rtx);
a2369ed3 786static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
d6b5193b 787static void rs6000_xcoff_asm_init_sections (void);
9b580a0b 788static int rs6000_xcoff_reloc_rw_mask (void);
8210e4c4 789static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
d6b5193b 790static section *rs6000_xcoff_select_section (tree, int,
b20a9cca 791 unsigned HOST_WIDE_INT);
d6b5193b
RS
792static void rs6000_xcoff_unique_section (tree, int);
793static section *rs6000_xcoff_select_rtx_section
794 (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
a2369ed3
DJ
795static const char * rs6000_xcoff_strip_name_encoding (const char *);
796static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
797static void rs6000_xcoff_file_start (void);
798static void rs6000_xcoff_file_end (void);
f1384257 799#endif
a2369ed3
DJ
800static int rs6000_variable_issue (FILE *, int, rtx, int);
801static bool rs6000_rtx_costs (rtx, int, int, int *);
802static int rs6000_adjust_cost (rtx, rtx, rtx, int);
44cd321e 803static void rs6000_sched_init (FILE *, int, int);
cbe26ab8 804static bool is_microcoded_insn (rtx);
d296e02e 805static bool is_nonpipeline_insn (rtx);
cbe26ab8
DN
806static bool is_cracked_insn (rtx);
807static bool is_branch_slot_insn (rtx);
44cd321e 808static bool is_load_insn (rtx);
e3a0e200 809static rtx get_store_dest (rtx pat);
44cd321e
PS
810static bool is_store_insn (rtx);
811static bool set_to_load_agen (rtx,rtx);
982afe02 812static bool adjacent_mem_locations (rtx,rtx);
a2369ed3
DJ
813static int rs6000_adjust_priority (rtx, int);
814static int rs6000_issue_rate (void);
b198261f 815static bool rs6000_is_costly_dependence (dep_t, int, int);
cbe26ab8
DN
816static rtx get_next_active_insn (rtx, rtx);
817static bool insn_terminates_group_p (rtx , enum group_termination);
44cd321e
PS
818static bool insn_must_be_first_in_group (rtx);
819static bool insn_must_be_last_in_group (rtx);
cbe26ab8
DN
820static bool is_costly_group (rtx *, rtx);
821static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
822static int redefine_groups (FILE *, int, rtx, rtx);
823static int pad_groups (FILE *, int, rtx, rtx);
824static void rs6000_sched_finish (FILE *, int);
44cd321e
PS
825static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
826static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
a2369ed3 827static int rs6000_use_sched_lookahead (void);
d296e02e 828static int rs6000_use_sched_lookahead_guard (rtx);
9c78b944 829static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
7ccf35ed 830static tree rs6000_builtin_mask_for_load (void);
89d67cca
DN
831static tree rs6000_builtin_mul_widen_even (tree);
832static tree rs6000_builtin_mul_widen_odd (tree);
f57d17f1 833static tree rs6000_builtin_conversion (enum tree_code, tree);
a2369ed3 834
58646b77 835static void def_builtin (int, const char *, tree, int);
3101faab 836static bool rs6000_vector_alignment_reachable (const_tree, bool);
a2369ed3
DJ
837static void rs6000_init_builtins (void);
838static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
839static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
840static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
841static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
842static void altivec_init_builtins (void);
843static void rs6000_common_init_builtins (void);
c15c90bb 844static void rs6000_init_libfuncs (void);
a2369ed3 845
96038623
DE
846static void paired_init_builtins (void);
847static rtx paired_expand_builtin (tree, rtx, bool *);
848static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
849static rtx paired_expand_stv_builtin (enum insn_code, tree);
850static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);
851
b20a9cca
AM
852static void enable_mask_for_builtins (struct builtin_description *, int,
853 enum rs6000_builtins,
854 enum rs6000_builtins);
7c62e993 855static tree build_opaque_vector_type (tree, int);
a2369ed3
DJ
856static void spe_init_builtins (void);
857static rtx spe_expand_builtin (tree, rtx, bool *);
61bea3b0 858static rtx spe_expand_stv_builtin (enum insn_code, tree);
a2369ed3
DJ
859static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
860static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
861static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
d1d0c603
JJ
862static rs6000_stack_t *rs6000_stack_info (void);
863static void debug_stack_info (rs6000_stack_t *);
a2369ed3
DJ
864
865static rtx altivec_expand_builtin (tree, rtx, bool *);
866static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
867static rtx altivec_expand_st_builtin (tree, rtx, bool *);
868static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
869static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
f676971a 870static rtx altivec_expand_predicate_builtin (enum insn_code,
c4ad648e 871 const char *, tree, rtx);
b4a62fa0 872static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
a2369ed3 873static rtx altivec_expand_stv_builtin (enum insn_code, tree);
7a4eca66
DE
874static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
875static rtx altivec_expand_vec_set_builtin (tree);
876static rtx altivec_expand_vec_ext_builtin (tree, rtx);
877static int get_element_number (tree, tree);
78f5898b 878static bool rs6000_handle_option (size_t, const char *, int);
a2369ed3 879static void rs6000_parse_tls_size_option (void);
5da702b1 880static void rs6000_parse_yes_no_option (const char *, const char *, int *);
a2369ed3
DJ
881static int first_altivec_reg_to_save (void);
882static unsigned int compute_vrsave_mask (void);
9390387d 883static void compute_save_world_info (rs6000_stack_t *info_ptr);
a2369ed3
DJ
884static void is_altivec_return_reg (rtx, void *);
885static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
886int easy_vector_constant (rtx, enum machine_mode);
3101faab 887static bool rs6000_is_opaque_type (const_tree);
a2369ed3 888static rtx rs6000_dwarf_register_span (rtx);
37ea0b7e 889static void rs6000_init_dwarf_reg_sizes_extra (tree);
a2369ed3 890static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
fdbe66f2 891static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
a2369ed3
DJ
892static rtx rs6000_tls_get_addr (void);
893static rtx rs6000_got_sym (void);
9390387d 894static int rs6000_tls_symbol_ref_1 (rtx *, void *);
a2369ed3
DJ
895static const char *rs6000_get_some_local_dynamic_name (void);
896static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
ded9bf77 897static rtx rs6000_complex_function_value (enum machine_mode);
b20a9cca 898static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
a2369ed3 899 enum machine_mode, tree);
0b5383eb
DJ
900static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
901 HOST_WIDE_INT);
902static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
903 tree, HOST_WIDE_INT);
904static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
905 HOST_WIDE_INT,
906 rtx[], int *);
907static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
586de218
KG
908 const_tree, HOST_WIDE_INT,
909 rtx[], int *);
910static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
ec6376ab 911static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
b1917422 912static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
c6e8c921
GK
913static void setup_incoming_varargs (CUMULATIVE_ARGS *,
914 enum machine_mode, tree,
915 int *, int);
8cd5a4e0 916static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
586de218 917 const_tree, bool);
78a52f11
RH
918static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
919 tree, bool);
3101faab 920static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
efdba735
SH
921#if TARGET_MACHO
922static void macho_branch_islands (void);
efdba735
SH
923static int no_previous_def (tree function_name);
924static tree get_prev_label (tree function_name);
c4e18b1c 925static void rs6000_darwin_file_start (void);
efdba735
SH
926#endif
927
c35d187f 928static tree rs6000_build_builtin_va_list (void);
d7bd8aeb 929static void rs6000_va_start (tree, rtx);
23a60a04 930static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
586de218 931static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
00b79d54 932static bool rs6000_scalar_mode_supported_p (enum machine_mode);
f676971a 933static bool rs6000_vector_mode_supported_p (enum machine_mode);
94ff898d 934static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
21213b4c 935 enum machine_mode);
94ff898d 936static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
21213b4c
DP
937 enum machine_mode);
938static int get_vsel_insn (enum machine_mode);
939static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
3aebbe5f 940static tree rs6000_stack_protect_fail (void);
21213b4c
DP
941
942const int INSN_NOT_AVAILABLE = -1;
93f90be6
FJ
943static enum machine_mode rs6000_eh_return_filter_mode (void);
944
17211ab5
GK
945/* Hash table stuff for keeping track of TOC entries. */
946
947struct toc_hash_struct GTY(())
948{
949 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
950 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
951 rtx key;
952 enum machine_mode key_mode;
953 int labelno;
954};
955
956static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
c81bebd7
MM
957\f
958/* Default register names. */
959char rs6000_reg_names[][8] =
960{
802a0058
MM
961 "0", "1", "2", "3", "4", "5", "6", "7",
962 "8", "9", "10", "11", "12", "13", "14", "15",
963 "16", "17", "18", "19", "20", "21", "22", "23",
964 "24", "25", "26", "27", "28", "29", "30", "31",
965 "0", "1", "2", "3", "4", "5", "6", "7",
966 "8", "9", "10", "11", "12", "13", "14", "15",
967 "16", "17", "18", "19", "20", "21", "22", "23",
968 "24", "25", "26", "27", "28", "29", "30", "31",
969 "mq", "lr", "ctr","ap",
970 "0", "1", "2", "3", "4", "5", "6", "7",
0ac081f6
AH
971 "xer",
972 /* AltiVec registers. */
0cd5e3a1
AH
973 "0", "1", "2", "3", "4", "5", "6", "7",
974 "8", "9", "10", "11", "12", "13", "14", "15",
975 "16", "17", "18", "19", "20", "21", "22", "23",
976 "24", "25", "26", "27", "28", "29", "30", "31",
59a4c851
AH
977 "vrsave", "vscr",
978 /* SPE registers. */
7d5175e1
JJ
979 "spe_acc", "spefscr",
980 /* Soft frame pointer. */
981 "sfp"
c81bebd7
MM
982};
983
984#ifdef TARGET_REGNAMES
8b60264b 985static const char alt_reg_names[][8] =
c81bebd7 986{
802a0058
MM
987 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
988 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
989 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
990 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
991 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
992 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
993 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
994 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
995 "mq", "lr", "ctr", "ap",
996 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
0ac081f6 997 "xer",
59a4c851 998 /* AltiVec registers. */
0ac081f6 999 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
59a4c851
AH
1000 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
1001 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
1002 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
1003 "vrsave", "vscr",
1004 /* SPE registers. */
7d5175e1
JJ
1005 "spe_acc", "spefscr",
1006 /* Soft frame pointer. */
1007 "sfp"
c81bebd7
MM
1008};
1009#endif
9878760c 1010\f
daf11973
MM
1011#ifndef MASK_STRICT_ALIGN
1012#define MASK_STRICT_ALIGN 0
1013#endif
ffcfcb5f
AM
1014#ifndef TARGET_PROFILE_KERNEL
1015#define TARGET_PROFILE_KERNEL 0
1016#endif
3961e8fe
RH
1017
1018/* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
1019#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
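/* For example, ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO) is 0x80000000, the
   bit for %v0, and ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 31) is
   0x00000001, the bit for %v31.  */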
672a6f42
NB
1020\f
1021/* Initialize the GCC target structure. */
91d231cb
JM
1022#undef TARGET_ATTRIBUTE_TABLE
1023#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
a5c76ee6
ZW
1024#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
1025#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
daf11973 1026
301d03af
RS
1027#undef TARGET_ASM_ALIGNED_DI_OP
1028#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
1029
1030/* Default unaligned ops are only provided for ELF. Find the ops needed
1031 for non-ELF systems. */
1032#ifndef OBJECT_FORMAT_ELF
cbaaba19 1033#if TARGET_XCOFF
ae6c1efd 1034/* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
301d03af
RS
1035 64-bit targets. */
1036#undef TARGET_ASM_UNALIGNED_HI_OP
1037#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
1038#undef TARGET_ASM_UNALIGNED_SI_OP
1039#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
1040#undef TARGET_ASM_UNALIGNED_DI_OP
1041#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
1042#else
1043/* For Darwin. */
1044#undef TARGET_ASM_UNALIGNED_HI_OP
1045#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
1046#undef TARGET_ASM_UNALIGNED_SI_OP
1047#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
49bd1d27
SS
1048#undef TARGET_ASM_UNALIGNED_DI_OP
1049#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
1050#undef TARGET_ASM_ALIGNED_DI_OP
1051#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
301d03af
RS
1052#endif
1053#endif
1054
1055/* This hook deals with fixups for relocatable code and DI-mode objects
1056 in 64-bit code. */
1057#undef TARGET_ASM_INTEGER
1058#define TARGET_ASM_INTEGER rs6000_assemble_integer
1059
93638d7a
AM
1060#ifdef HAVE_GAS_HIDDEN
1061#undef TARGET_ASM_ASSEMBLE_VISIBILITY
1062#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
1063#endif
1064
c4501e62
JJ
1065#undef TARGET_HAVE_TLS
1066#define TARGET_HAVE_TLS HAVE_AS_TLS
1067
1068#undef TARGET_CANNOT_FORCE_CONST_MEM
a7e0b075 1069#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
c4501e62 1070
08c148a8
NB
1071#undef TARGET_ASM_FUNCTION_PROLOGUE
1072#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
1073#undef TARGET_ASM_FUNCTION_EPILOGUE
1074#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
1075
b54cf83a
DE
1076#undef TARGET_SCHED_VARIABLE_ISSUE
1077#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
1078
c237e94a
ZW
1079#undef TARGET_SCHED_ISSUE_RATE
1080#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
1081#undef TARGET_SCHED_ADJUST_COST
1082#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
1083#undef TARGET_SCHED_ADJUST_PRIORITY
1084#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
f676971a 1085#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
569fa502 1086#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
44cd321e
PS
1087#undef TARGET_SCHED_INIT
1088#define TARGET_SCHED_INIT rs6000_sched_init
cbe26ab8
DN
1089#undef TARGET_SCHED_FINISH
1090#define TARGET_SCHED_FINISH rs6000_sched_finish
44cd321e
PS
1091#undef TARGET_SCHED_REORDER
1092#define TARGET_SCHED_REORDER rs6000_sched_reorder
1093#undef TARGET_SCHED_REORDER2
1094#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2
c237e94a 1095
be12c2b0
VM
1096#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
1097#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
1098
d296e02e
AP
1099#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
1100#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard
1101
7ccf35ed
DN
1102#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
1103#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
89d67cca
DN
1104#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
1105#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
1106#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
1107#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
f57d17f1
TM
1108#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
1109#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
7ccf35ed 1110
5b900a4c
DN
1111#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
1112#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable
1113
0ac081f6
AH
1114#undef TARGET_INIT_BUILTINS
1115#define TARGET_INIT_BUILTINS rs6000_init_builtins
1116
1117#undef TARGET_EXPAND_BUILTIN
1118#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
1119
608063c3
JB
1120#undef TARGET_MANGLE_TYPE
1121#define TARGET_MANGLE_TYPE rs6000_mangle_type
f18eca82 1122
c15c90bb
ZW
1123#undef TARGET_INIT_LIBFUNCS
1124#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
1125
f1384257 1126#if TARGET_MACHO
0e5dbd9b 1127#undef TARGET_BINDS_LOCAL_P
31920d83 1128#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
f1384257 1129#endif
0e5dbd9b 1130
77ccdfed
EC
1131#undef TARGET_MS_BITFIELD_LAYOUT_P
1132#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p
1133
3961e8fe
RH
1134#undef TARGET_ASM_OUTPUT_MI_THUNK
1135#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
1136
3961e8fe 1137#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3101faab 1138#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
00b960c7 1139
4977bab6
ZW
1140#undef TARGET_FUNCTION_OK_FOR_SIBCALL
1141#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
1142
2e3f0db6
DJ
1143#undef TARGET_INVALID_WITHIN_DOLOOP
1144#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
9419649c 1145
3c50106f
RH
1146#undef TARGET_RTX_COSTS
1147#define TARGET_RTX_COSTS rs6000_rtx_costs
dcefdf67
RH
1148#undef TARGET_ADDRESS_COST
1149#define TARGET_ADDRESS_COST hook_int_rtx_0
3c50106f 1150
c8e4f0e9 1151#undef TARGET_VECTOR_OPAQUE_P
58646b77 1152#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
62e1dfcf 1153
96714395
AH
1154#undef TARGET_DWARF_REGISTER_SPAN
1155#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
1156
37ea0b7e
JM
1157#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
1158#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra
1159
c6e8c921
GK
1160/* On rs6000, function arguments are promoted, as are function return
1161 values. */
1162#undef TARGET_PROMOTE_FUNCTION_ARGS
586de218 1163#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
c6e8c921 1164#undef TARGET_PROMOTE_FUNCTION_RETURN
586de218 1165#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
c6e8c921 1166
c6e8c921
GK
1167#undef TARGET_RETURN_IN_MEMORY
1168#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
1169
1170#undef TARGET_SETUP_INCOMING_VARARGS
1171#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
1172
1173/* Always strict argument naming on rs6000. */
1174#undef TARGET_STRICT_ARGUMENT_NAMING
1175#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
1176#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
1177#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
42ba5130 1178#undef TARGET_SPLIT_COMPLEX_ARG
3101faab 1179#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
fe984136
RH
1180#undef TARGET_MUST_PASS_IN_STACK
1181#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
8cd5a4e0
RH
1182#undef TARGET_PASS_BY_REFERENCE
1183#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
78a52f11
RH
1184#undef TARGET_ARG_PARTIAL_BYTES
1185#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
c6e8c921 1186
c35d187f
RH
1187#undef TARGET_BUILD_BUILTIN_VA_LIST
1188#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
1189
d7bd8aeb
JJ
1190#undef TARGET_EXPAND_BUILTIN_VA_START
1191#define TARGET_EXPAND_BUILTIN_VA_START rs6000_va_start
1192
cd3ce9b4
JM
1193#undef TARGET_GIMPLIFY_VA_ARG_EXPR
1194#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
1195
93f90be6
FJ
1196#undef TARGET_EH_RETURN_FILTER_MODE
1197#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
1198
00b79d54
BE
1199#undef TARGET_SCALAR_MODE_SUPPORTED_P
1200#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p
1201
f676971a
EC
1202#undef TARGET_VECTOR_MODE_SUPPORTED_P
1203#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
1204
4d3e6fae
FJ
1205#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
1206#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
1207
78f5898b
AH
1208#undef TARGET_HANDLE_OPTION
1209#define TARGET_HANDLE_OPTION rs6000_handle_option
1210
1211#undef TARGET_DEFAULT_TARGET_FLAGS
1212#define TARGET_DEFAULT_TARGET_FLAGS \
716019c0 1213 (TARGET_DEFAULT)
78f5898b 1214
3aebbe5f
JJ
1215#undef TARGET_STACK_PROTECT_FAIL
1216#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
1217
445cf5eb
JM
1218/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
1219 The PowerPC architecture requires only weak consistency among
1220 processors--that is, memory accesses between processors need not be
1221 sequentially consistent and memory accesses among processors can occur
1222 in any order. The ability to order memory accesses weakly provides
1223 opportunities for more efficient use of the system bus. Unless a
1224 dependency exists, the 604e allows read operations to precede store
1225 operations. */
1226#undef TARGET_RELAXED_ORDERING
1227#define TARGET_RELAXED_ORDERING true
1228
fdbe66f2
EB
1229#ifdef HAVE_AS_TLS
1230#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1231#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1232#endif
1233
aacd3885
RS
1234/* Use a 32-bit anchor range. This leads to sequences like:
1235
1236 addis tmp,anchor,high
1237 add dest,tmp,low
1238
1239 where tmp itself acts as an anchor, and can be shared between
1240 accesses to the same 64k page. */
1241#undef TARGET_MIN_ANCHOR_OFFSET
1242#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
1243#undef TARGET_MAX_ANCHOR_OFFSET
1244#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
1245#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1246#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p
1247
9c78b944
DE
1248#undef TARGET_BUILTIN_RECIPROCAL
1249#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal
1250
e41b2a33
PB
1251#undef TARGET_EXPAND_TO_RTL_HOOK
1252#define TARGET_EXPAND_TO_RTL_HOOK rs6000_alloc_sdmode_stack_slot
1253
1254#undef TARGET_INSTANTIATE_DECLS
1255#define TARGET_INSTANTIATE_DECLS rs6000_instantiate_decls
1256
f6897b10 1257struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 1258\f
0d1fbc8c
AH
1259
1260/* Value is 1 if hard register REGNO can hold a value of machine-mode
1261 MODE. */
1262static int
1263rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1264{
1265 /* The GPRs can hold any mode, but values bigger than one register
1266 cannot go past R31. */
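   /* For instance, with 32-bit GPRs a DImode value occupies two registers,
      so it is rejected in r31 because its second half would fall past the
      last GPR.  */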
1267 if (INT_REGNO_P (regno))
1268 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1269
a5a97921 1270 /* The float registers can only hold floating modes and DImode.
7393f7f8 1271 This excludes the 32-bit decimal float mode for now. */
0d1fbc8c
AH
1272 if (FP_REGNO_P (regno))
1273 return
96038623 1274 ((SCALAR_FLOAT_MODE_P (mode)
c092b045 1275 && (mode != TDmode || (regno % 2) == 0)
0d1fbc8c
AH
1276 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1277 || (GET_MODE_CLASS (mode) == MODE_INT
96038623
DE
1278 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
1279 || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
1280 && PAIRED_VECTOR_MODE (mode)));
0d1fbc8c
AH
1281
1282 /* The CR register can only hold CC modes. */
1283 if (CR_REGNO_P (regno))
1284 return GET_MODE_CLASS (mode) == MODE_CC;
1285
1286 if (XER_REGNO_P (regno))
1287 return mode == PSImode;
1288
 1289 /* AltiVec modes go only in AltiVec registers. */
1290 if (ALTIVEC_REGNO_P (regno))
1291 return ALTIVEC_VECTOR_MODE (mode);
1292
1293 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1294 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1295 return 1;
1296
 1297 /* We cannot put TImode anywhere except the general registers, and it
 1298 must be able to fit within the register set. */
1299
1300 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1301}
1302
1303/* Initialize rs6000_hard_regno_mode_ok_p table. */
1304static void
1305rs6000_init_hard_regno_mode_ok (void)
1306{
1307 int r, m;
1308
1309 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1310 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1311 if (rs6000_hard_regno_mode_ok (r, m))
1312 rs6000_hard_regno_mode_ok_p[m][r] = true;
1313}
1314
e4cad568
GK
1315#if TARGET_MACHO
1316/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1317
1318static void
1319darwin_rs6000_override_options (void)
1320{
 1321 /* The Darwin ABI always includes AltiVec; it cannot (validly) be
 1322 turned off. */
1323 rs6000_altivec_abi = 1;
1324 TARGET_ALTIVEC_VRSAVE = 1;
1325 if (DEFAULT_ABI == ABI_DARWIN)
1326 {
1327 if (MACHO_DYNAMIC_NO_PIC_P)
1328 {
1329 if (flag_pic)
1330 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1331 flag_pic = 0;
1332 }
1333 else if (flag_pic == 1)
1334 {
1335 flag_pic = 2;
1336 }
1337 }
1338 if (TARGET_64BIT && ! TARGET_POWERPC64)
1339 {
1340 target_flags |= MASK_POWERPC64;
1341 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1342 }
1343 if (flag_mkernel)
1344 {
1345 rs6000_default_long_calls = 1;
1346 target_flags |= MASK_SOFT_FLOAT;
1347 }
1348
1349 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
1350 Altivec. */
1351 if (!flag_mkernel && !flag_apple_kext
1352 && TARGET_64BIT
1353 && ! (target_flags_explicit & MASK_ALTIVEC))
1354 target_flags |= MASK_ALTIVEC;
1355
 1356 /* Unless the user (not the configurer) has explicitly overridden
 1357 it with -mcpu=G3 or -mno-altivec, 10.5+ targets default to
 1358 G4 unless targeting the kernel. */
1359 if (!flag_mkernel
1360 && !flag_apple_kext
1361 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1362 && ! (target_flags_explicit & MASK_ALTIVEC)
1363 && ! rs6000_select[1].string)
1364 {
1365 target_flags |= MASK_ALTIVEC;
1366 }
1367}
1368#endif
1369
c1e55850
GK
1370/* If not otherwise specified by a target, make 'long double' equivalent to
1371 'double'. */
1372
1373#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1374#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1375#endif
1376
5248c961
RK
1377/* Override command line options. Mostly we process the processor
1378 type and sometimes adjust other TARGET_ options. */
1379
1380void
d779d0dc 1381rs6000_override_options (const char *default_cpu)
5248c961 1382{
c4d38ccb 1383 size_t i, j;
8e3f41e7 1384 struct rs6000_cpu_select *ptr;
66188a7e 1385 int set_masks;
5248c961 1386
66188a7e 1387 /* Simplifications for entries below. */
85638c0d 1388
66188a7e
GK
1389 enum {
1390 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1391 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1392 };
85638c0d 1393
66188a7e
GK
1394 /* This table occasionally claims that a processor does not support
1395 a particular feature even though it does, but the feature is slower
1396 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1397 complete description of the processor's support.
66188a7e
GK
1398
1399 Please keep this list in order, and don't forget to update the
1400 documentation in invoke.texi when adding a new processor or
1401 flag. */
5248c961
RK
1402 static struct ptt
1403 {
8b60264b
KG
1404 const char *const name; /* Canonical processor name. */
1405 const enum processor_type processor; /* Processor type enum value. */
1406 const int target_enable; /* Target flags to enable. */
8b60264b 1407 } const processor_target_table[]
66188a7e 1408 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1409 {"403", PROCESSOR_PPC403,
66188a7e 1410 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1411 {"405", PROCESSOR_PPC405,
716019c0
JM
1412 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1413 {"405fp", PROCESSOR_PPC405,
1414 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1415 {"440", PROCESSOR_PPC440,
716019c0
JM
1416 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1417 {"440fp", PROCESSOR_PPC440,
1418 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1419 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1420 {"601", PROCESSOR_PPC601,
66188a7e
GK
1421 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1422 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1423 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1424 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1425 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1426 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1427 {"620", PROCESSOR_PPC620,
1428 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1429 {"630", PROCESSOR_PPC630,
1430 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1431 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1432 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1433 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1434 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1435 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1436 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1437 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1438 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1439 /* 8548 has a dummy entry for now. */
a45bce6e 1440 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
fa41c305
EW
1441 {"e300c2", PROCESSOR_PPCE300C2, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1442 {"e300c3", PROCESSOR_PPCE300C3, POWERPC_BASE_MASK},
66188a7e 1443 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1444 {"970", PROCESSOR_POWER4,
66188a7e 1445 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1446 {"cell", PROCESSOR_CELL,
1447 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1448 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1449 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1450 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1451 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1452 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1453 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1454 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1455 {"power2", PROCESSOR_POWER,
1456 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1457 {"power3", PROCESSOR_PPC630,
1458 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1459 {"power4", PROCESSOR_POWER4,
fc091c8e 1460 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
ec507f2d 1461 {"power5", PROCESSOR_POWER5,
432218ba
DE
1462 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1463 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7
DE
1464 {"power5+", PROCESSOR_POWER5,
1465 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1466 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1467 {"power6", PROCESSOR_POWER6,
e118597e 1468 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1469 | MASK_FPRND | MASK_CMPB | MASK_DFP },
44cd321e
PS
1470 {"power6x", PROCESSOR_POWER6,
1471 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1472 | MASK_FPRND | MASK_CMPB | MASK_MFPGPR | MASK_DFP },
66188a7e
GK
1473 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1474 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1475 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1476 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1477 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1478 {"rios2", PROCESSOR_RIOS2,
1479 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1480 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1481 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1482 {"rs64", PROCESSOR_RS64A,
1483 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1484 };
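  /* Worked example (hypothetical, not part of the original source): a new
     entry for an imaginary "foo9000" processor would follow the same shape
     as the rows above, e.g.

       {"foo9000", PROCESSOR_POWER6,
        POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF},

     i.e. a canonical -mcpu= name, a processor_type enumerator used for
     scheduling and tuning, and the set of target flags that CPU implies.
     As the comment above the table notes, the list must stay sorted and
     invoke.texi must be updated alongside any new entry.  */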
5248c961 1485
ca7558fc 1486 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1487
66188a7e
GK
1488 /* Some OSs don't support saving the high part of 64-bit registers on
1489 context switch. Other OSs don't support saving Altivec registers.
1490 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1491 settings; if the user wants either, the user must explicitly specify
1492 them and we won't interfere with the user's specification. */
1493
1494 enum {
1495 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1496 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1497 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1498 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1499 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1500 };
0d1fbc8c
AH
1501
1502 rs6000_init_hard_regno_mode_ok ();
1503
c4ad648e 1504 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1505#ifdef OS_MISSING_POWERPC64
1506 if (OS_MISSING_POWERPC64)
1507 set_masks &= ~MASK_POWERPC64;
1508#endif
1509#ifdef OS_MISSING_ALTIVEC
1510 if (OS_MISSING_ALTIVEC)
1511 set_masks &= ~MASK_ALTIVEC;
1512#endif
1513
768875a8
AM
1514 /* Don't override by the processor default if given explicitly. */
1515 set_masks &= ~target_flags_explicit;
957211c3 1516
a4f6c312 1517 /* Identify the processor type. */
8e3f41e7 1518 rs6000_select[0].string = default_cpu;
3cb999d8 1519 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1520
b6a1cbae 1521 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1522 {
8e3f41e7
MM
1523 ptr = &rs6000_select[i];
1524 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1525 {
8e3f41e7
MM
1526 for (j = 0; j < ptt_size; j++)
1527 if (! strcmp (ptr->string, processor_target_table[j].name))
1528 {
1529 if (ptr->set_tune_p)
1530 rs6000_cpu = processor_target_table[j].processor;
1531
1532 if (ptr->set_arch_p)
1533 {
66188a7e
GK
1534 target_flags &= ~set_masks;
1535 target_flags |= (processor_target_table[j].target_enable
1536 & set_masks);
8e3f41e7
MM
1537 }
1538 break;
1539 }
1540
4406229e 1541 if (j == ptt_size)
8e3f41e7 1542 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1543 }
1544 }
8a61d227 1545
993f19a8 1546 if (TARGET_E500)
a3170dc6
AH
1547 rs6000_isel = 1;
1548
fa41c305
EW
1549 if (rs6000_cpu == PROCESSOR_PPCE300C2 || rs6000_cpu == PROCESSOR_PPCE300C3)
1550 {
1551 if (TARGET_ALTIVEC)
1552 error ("AltiVec not supported in this target");
1553 if (TARGET_SPE)
1554 error ("SPE not supported in this target");
1555 }
1556
dff9f1b6
DE
1557 /* If we are optimizing big endian systems for space, use the load/store
1558 multiple and string instructions. */
ef792183 1559 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1560 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1561
a4f6c312
SS
1562 /* Don't allow -mmultiple or -mstring on little endian systems
1563 unless the cpu is a 750, because the hardware doesn't support the
1564 instructions used in little endian mode, and using them causes an
1565 alignment trap. The 750 does not cause an alignment trap (except
1566 when the target is unaligned). */
bef84347 1567
b21fb038 1568 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1569 {
1570 if (TARGET_MULTIPLE)
1571 {
1572 target_flags &= ~MASK_MULTIPLE;
b21fb038 1573 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1574 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1575 }
1576
1577 if (TARGET_STRING)
1578 {
1579 target_flags &= ~MASK_STRING;
b21fb038 1580 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1581 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1582 }
1583 }
3933e0e1 1584
38c1f2d7
MM
1585 /* Set debug flags */
1586 if (rs6000_debug_name)
1587 {
bfc79d3b 1588 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1589 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1590 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1591 rs6000_debug_stack = 1;
bfc79d3b 1592 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1593 rs6000_debug_arg = 1;
1594 else
c725bd79 1595 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1596 }
1597
57ac7be9
AM
1598 if (rs6000_traceback_name)
1599 {
1600 if (! strncmp (rs6000_traceback_name, "full", 4))
1601 rs6000_traceback = traceback_full;
1602 else if (! strncmp (rs6000_traceback_name, "part", 4))
1603 rs6000_traceback = traceback_part;
1604 else if (! strncmp (rs6000_traceback_name, "no", 2))
1605 rs6000_traceback = traceback_none;
1606 else
9e637a26 1607 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1608 rs6000_traceback_name);
1609 }
1610
78f5898b
AH
1611 if (!rs6000_explicit_options.long_double)
1612 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1613
602ea4d3 1614#ifndef POWERPC_LINUX
d3603e8c 1615 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1616 rs6000_ieeequad = 1;
1617#endif
1618
0db747be
DE
1619 /* Enable Altivec ABI for AIX -maltivec. */
1620 if (TARGET_XCOFF && TARGET_ALTIVEC)
1621 rs6000_altivec_abi = 1;
1622
a2db2771
JJ
1623 /* The AltiVec ABI is the default for PowerPC-64 GNU/Linux. For
1624 PowerPC-32 GNU/Linux, -maltivec implies the AltiVec ABI. It can
1625 be explicitly overridden in either case. */
1626 if (TARGET_ELF)
6d0ef01e 1627 {
a2db2771
JJ
1628 if (!rs6000_explicit_options.altivec_abi
1629 && (TARGET_64BIT || TARGET_ALTIVEC))
1630 rs6000_altivec_abi = 1;
1631
1632 /* Enable VRSAVE for AltiVec ABI, unless explicitly overridden. */
1633 if (!rs6000_explicit_options.vrsave)
1634 TARGET_ALTIVEC_VRSAVE = rs6000_altivec_abi;
6d0ef01e
HP
1635 }
1636
594a51fe
SS
1637 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1638 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1639 {
1640 rs6000_darwin64_abi = 1;
9c7956fd 1641#if TARGET_MACHO
6ac49599 1642 darwin_one_byte_bool = 1;
9c7956fd 1643#endif
d9168963
SS
1644 /* Default to natural alignment, for better performance. */
1645 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1646 }
1647
194c524a
DE
1648 /* Place FP constants in the constant pool instead of TOC
1649 if section anchors enabled. */
1650 if (flag_section_anchors)
1651 TARGET_NO_FP_IN_TOC = 1;
1652
c4501e62
JJ
1653 /* Handle -mtls-size option. */
1654 rs6000_parse_tls_size_option ();
1655
a7ae18e2
AH
1656#ifdef SUBTARGET_OVERRIDE_OPTIONS
1657 SUBTARGET_OVERRIDE_OPTIONS;
1658#endif
1659#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1660 SUBSUBTARGET_OVERRIDE_OPTIONS;
1661#endif
4d4cbc0e
AH
1662#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1663 SUB3TARGET_OVERRIDE_OPTIONS;
1664#endif
a7ae18e2 1665
5da702b1
AH
1666 if (TARGET_E500)
1667 {
1668 /* The e500 does not have string instructions, and we set
1669 MASK_STRING above when optimizing for size. */
1670 if ((target_flags & MASK_STRING) != 0)
1671 target_flags = target_flags & ~MASK_STRING;
1672 }
1673 else if (rs6000_select[1].string != NULL)
1674 {
1675 /* For the powerpc-eabispe configuration, we set all these by
1676 default, so let's unset them if we manually set another
1677 CPU that is not the E500. */
a2db2771 1678 if (!rs6000_explicit_options.spe_abi)
5da702b1 1679 rs6000_spe_abi = 0;
78f5898b 1680 if (!rs6000_explicit_options.spe)
5da702b1 1681 rs6000_spe = 0;
78f5898b 1682 if (!rs6000_explicit_options.float_gprs)
5da702b1 1683 rs6000_float_gprs = 0;
78f5898b 1684 if (!rs6000_explicit_options.isel)
5da702b1
AH
1685 rs6000_isel = 0;
1686 }
b5044283 1687
eca0d5e8
JM
1688 /* Detect invalid option combinations with E500. */
1689 CHECK_E500_OPTIONS;
1690
ec507f2d 1691 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1692 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1693 && rs6000_cpu != PROCESSOR_POWER6
1694 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1695 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1696 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1697 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1698 || rs6000_cpu == PROCESSOR_POWER5
1699 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1700
ec507f2d
DE
1701 rs6000_sched_restricted_insns_priority
1702 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1703
569fa502 1704 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1705 rs6000_sched_costly_dep
1706 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1707
569fa502
DN
1708 if (rs6000_sched_costly_dep_str)
1709 {
f676971a 1710 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1711 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1712 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1713 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1714 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1715 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1716 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1717 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1718 else
c4ad648e 1719 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1720 }
1721
1722 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1723 rs6000_sched_insert_nops
1724 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1725
cbe26ab8
DN
1726 if (rs6000_sched_insert_nops_str)
1727 {
1728 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1729 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1730 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1731 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1732 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1733 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1734 else
c4ad648e 1735 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1736 }
1737
c81bebd7 1738#ifdef TARGET_REGNAMES
a4f6c312
SS
1739 /* If the user desires alternate register names, copy in the
1740 alternate names now. */
c81bebd7 1741 if (TARGET_REGNAMES)
4e135bdd 1742 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1743#endif
1744
df01da37 1745 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1746 If -maix-struct-return or -msvr4-struct-return was explicitly
1747 used, don't override with the ABI default. */
df01da37
DE
1748 if (!rs6000_explicit_options.aix_struct_ret)
1749 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1750
602ea4d3 1751 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1752 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1753
f676971a 1754 if (TARGET_TOC)
9ebbca7d 1755 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1756
301d03af
RS
1757 /* We can only guarantee the availability of DI pseudo-ops when
1758 assembling for 64-bit targets. */
ae6c1efd 1759 if (!TARGET_64BIT)
301d03af
RS
1760 {
1761 targetm.asm_out.aligned_op.di = NULL;
1762 targetm.asm_out.unaligned_op.di = NULL;
1763 }
1764
1494c534
DE
1765 /* Set branch target alignment, if not optimizing for size. */
1766 if (!optimize_size)
1767 {
d296e02e
AP
1768 /* The Cell wants branch targets aligned to 8 bytes for dual issue. */
1769 if (rs6000_cpu == PROCESSOR_CELL)
1770 {
1771 if (align_functions <= 0)
1772 align_functions = 8;
1773 if (align_jumps <= 0)
1774 align_jumps = 8;
1775 if (align_loops <= 0)
1776 align_loops = 8;
1777 }
44cd321e 1778 if (rs6000_align_branch_targets)
1494c534
DE
1779 {
1780 if (align_functions <= 0)
1781 align_functions = 16;
1782 if (align_jumps <= 0)
1783 align_jumps = 16;
1784 if (align_loops <= 0)
1785 align_loops = 16;
1786 }
1787 if (align_jumps_max_skip <= 0)
1788 align_jumps_max_skip = 15;
1789 if (align_loops_max_skip <= 0)
1790 align_loops_max_skip = 15;
1791 }
2792d578 1792
71f123ca
FS
1793 /* Arrange to save and restore machine status around nested functions. */
1794 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1795
1796 /* We should always be splitting complex arguments, but we can't break
1797 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1798 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1799 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1800
1801 /* Initialize rs6000_cost with the appropriate target costs. */
1802 if (optimize_size)
1803 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1804 else
1805 switch (rs6000_cpu)
1806 {
1807 case PROCESSOR_RIOS1:
1808 rs6000_cost = &rios1_cost;
1809 break;
1810
1811 case PROCESSOR_RIOS2:
1812 rs6000_cost = &rios2_cost;
1813 break;
1814
1815 case PROCESSOR_RS64A:
1816 rs6000_cost = &rs64a_cost;
1817 break;
1818
1819 case PROCESSOR_MPCCORE:
1820 rs6000_cost = &mpccore_cost;
1821 break;
1822
1823 case PROCESSOR_PPC403:
1824 rs6000_cost = &ppc403_cost;
1825 break;
1826
1827 case PROCESSOR_PPC405:
1828 rs6000_cost = &ppc405_cost;
1829 break;
1830
1831 case PROCESSOR_PPC440:
1832 rs6000_cost = &ppc440_cost;
1833 break;
1834
1835 case PROCESSOR_PPC601:
1836 rs6000_cost = &ppc601_cost;
1837 break;
1838
1839 case PROCESSOR_PPC603:
1840 rs6000_cost = &ppc603_cost;
1841 break;
1842
1843 case PROCESSOR_PPC604:
1844 rs6000_cost = &ppc604_cost;
1845 break;
1846
1847 case PROCESSOR_PPC604e:
1848 rs6000_cost = &ppc604e_cost;
1849 break;
1850
1851 case PROCESSOR_PPC620:
8b897cfa
RS
1852 rs6000_cost = &ppc620_cost;
1853 break;
1854
f0517163
RS
1855 case PROCESSOR_PPC630:
1856 rs6000_cost = &ppc630_cost;
1857 break;
1858
982afe02 1859 case PROCESSOR_CELL:
d296e02e
AP
1860 rs6000_cost = &ppccell_cost;
1861 break;
1862
8b897cfa
RS
1863 case PROCESSOR_PPC750:
1864 case PROCESSOR_PPC7400:
1865 rs6000_cost = &ppc750_cost;
1866 break;
1867
1868 case PROCESSOR_PPC7450:
1869 rs6000_cost = &ppc7450_cost;
1870 break;
1871
1872 case PROCESSOR_PPC8540:
1873 rs6000_cost = &ppc8540_cost;
1874 break;
1875
fa41c305
EW
1876 case PROCESSOR_PPCE300C2:
1877 case PROCESSOR_PPCE300C3:
1878 rs6000_cost = &ppce300c2c3_cost;
1879 break;
1880
8b897cfa
RS
1881 case PROCESSOR_POWER4:
1882 case PROCESSOR_POWER5:
1883 rs6000_cost = &power4_cost;
1884 break;
1885
44cd321e
PS
1886 case PROCESSOR_POWER6:
1887 rs6000_cost = &power6_cost;
1888 break;
1889
8b897cfa 1890 default:
37409796 1891 gcc_unreachable ();
8b897cfa 1892 }
0b11da67
DE
1893
1894 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1895 set_param_value ("simultaneous-prefetches",
1896 rs6000_cost->simultaneous_prefetches);
1897 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1898 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1899 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1900 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1901 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1902 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
d7bd8aeb
JJ
1903
1904 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
1905 can be optimized to ap = __builtin_next_arg (0). */
1906 if (DEFAULT_ABI != ABI_V4)
1907 targetm.expand_builtin_va_start = NULL;
5248c961 1908}
5accd822 1909
7ccf35ed
DN
1910/* Implement targetm.vectorize.builtin_mask_for_load. */
1911static tree
1912rs6000_builtin_mask_for_load (void)
1913{
1914 if (TARGET_ALTIVEC)
1915 return altivec_builtin_mask_for_load;
1916 else
1917 return 0;
1918}
1919
f57d17f1
TM
1920/* Implement targetm.vectorize.builtin_conversion. */
1921static tree
1922rs6000_builtin_conversion (enum tree_code code, tree type)
1923{
1924 if (!TARGET_ALTIVEC)
1925 return NULL_TREE;
982afe02 1926
f57d17f1
TM
1927 switch (code)
1928 {
1929 case FLOAT_EXPR:
1930 switch (TYPE_MODE (type))
1931 {
1932 case V4SImode:
982afe02 1933 return TYPE_UNSIGNED (type) ?
f57d17f1
TM
1934 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX] :
1935 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
1936 default:
1937 return NULL_TREE;
1938 }
1939 default:
1940 return NULL_TREE;
1941 }
1942}
1943
89d67cca
DN
1944/* Implement targetm.vectorize.builtin_mul_widen_even. */
1945static tree
1946rs6000_builtin_mul_widen_even (tree type)
1947{
1948 if (!TARGET_ALTIVEC)
1949 return NULL_TREE;
1950
1951 switch (TYPE_MODE (type))
1952 {
1953 case V8HImode:
982afe02 1954 return TYPE_UNSIGNED (type) ?
89d67cca
DN
1955 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH] :
1956 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
1957
1958 case V16QImode:
1959 return TYPE_UNSIGNED (type) ?
1960 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB] :
1961 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
1962 default:
1963 return NULL_TREE;
1964 }
1965}
1966
1967/* Implement targetm.vectorize.builtin_mul_widen_odd. */
1968static tree
1969rs6000_builtin_mul_widen_odd (tree type)
1970{
1971 if (!TARGET_ALTIVEC)
1972 return NULL_TREE;
1973
1974 switch (TYPE_MODE (type))
1975 {
1976 case V8HImode:
1977 return TYPE_UNSIGNED (type) ?
1978 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH] :
1979 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
1980
1981 case V16QImode:
1982 return TYPE_UNSIGNED (type) ?
1983 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB] :
1984 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
1985 default:
1986 return NULL_TREE;
1987 }
1988}
1989
5b900a4c
DN
1990
1991/* Return true iff a data reference of TYPE can reach vector alignment (16)
1992 after applying N iterations. This routine does not determine
1993 how many iterations are required to reach the desired alignment. */
1994
1995static bool
3101faab 1996rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
1997{
1998 if (is_packed)
1999 return false;
2000
2001 if (TARGET_32BIT)
2002 {
2003 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
2004 return true;
2005
2006 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
2007 return true;
2008
2009 return false;
2010 }
2011 else
2012 {
2013 if (TARGET_MACHO)
2014 return false;
2015
2016 /* Assume that all other types are naturally aligned. CHECKME! */
2017 return true;
2018 }
2019}
2020
5da702b1
AH
2021/* Handle generic options of the form -mfoo=yes/no.
2022 NAME is the option name.
2023 VALUE is the option value.
2024 FLAG is a pointer to the flag in which to store 1 or 0, depending on
2025 whether the option value is 'yes' or 'no' respectively. */
993f19a8 2026static void
5da702b1 2027rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 2028{
5da702b1 2029 if (value == 0)
993f19a8 2030 return;
5da702b1
AH
2031 else if (!strcmp (value, "yes"))
2032 *flag = 1;
2033 else if (!strcmp (value, "no"))
2034 *flag = 0;
08b57fb3 2035 else
5da702b1 2036 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
2037}
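/* Usage sketch (not part of the original source): this helper backs the
   -m<foo>=yes/no style options handled in rs6000_handle_option below, e.g.

     rs6000_parse_yes_no_option ("isel", "yes", &rs6000_isel);

   leaves rs6000_isel set to 1, a value of "no" sets it to 0, and any other
   value is reported through the "unknown -misel= option specified" error
   path above.  */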
2038
c4501e62
JJ
2039/* Validate and record the size specified with the -mtls-size option. */
2040
2041static void
863d938c 2042rs6000_parse_tls_size_option (void)
c4501e62
JJ
2043{
2044 if (rs6000_tls_size_string == 0)
2045 return;
2046 else if (strcmp (rs6000_tls_size_string, "16") == 0)
2047 rs6000_tls_size = 16;
2048 else if (strcmp (rs6000_tls_size_string, "32") == 0)
2049 rs6000_tls_size = 32;
2050 else if (strcmp (rs6000_tls_size_string, "64") == 0)
2051 rs6000_tls_size = 64;
2052 else
9e637a26 2053 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
2054}
2055
5accd822 2056void
a2369ed3 2057optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 2058{
2e3f0db6
DJ
2059 if (DEFAULT_ABI == ABI_DARWIN)
2060 /* The Darwin libraries never set errno, so we might as well
2061 avoid calling them when that's the only reason we would. */
2062 flag_errno_math = 0;
59d6560b
DE
2063
2064 /* Double growth factor to counter reduced min jump length. */
2065 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
2066
2067 /* Enable section anchors by default.
2068 Skip section anchors for Objective C and Objective C++
2069 until the front ends are fixed. */
23f99493 2070 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
194c524a 2071 flag_section_anchors = 1;
5accd822 2072}
78f5898b
AH
2073
2074/* Implement TARGET_HANDLE_OPTION. */
2075
2076static bool
2077rs6000_handle_option (size_t code, const char *arg, int value)
2078{
2079 switch (code)
2080 {
2081 case OPT_mno_power:
2082 target_flags &= ~(MASK_POWER | MASK_POWER2
2083 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2084 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2085 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2086 break;
2087 case OPT_mno_powerpc:
2088 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2089 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2090 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2091 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2092 break;
2093 case OPT_mfull_toc:
d2894ab5
DE
2094 target_flags &= ~MASK_MINIMAL_TOC;
2095 TARGET_NO_FP_IN_TOC = 0;
2096 TARGET_NO_SUM_IN_TOC = 0;
2097 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2098#ifdef TARGET_USES_SYSV4_OPT
2099 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
2100 just the same as -mminimal-toc. */
2101 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2102 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2103#endif
2104 break;
2105
2106#ifdef TARGET_USES_SYSV4_OPT
2107 case OPT_mtoc:
2108 /* Make -mtoc behave like -mminimal-toc. */
2109 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2110 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2111 break;
2112#endif
2113
2114#ifdef TARGET_USES_AIX64_OPT
2115 case OPT_maix64:
2116#else
2117 case OPT_m64:
2118#endif
2c9c9afd
AM
2119 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2120 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2121 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2122 break;
2123
2124#ifdef TARGET_USES_AIX64_OPT
2125 case OPT_maix32:
2126#else
2127 case OPT_m32:
2128#endif
2129 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2130 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2131 break;
2132
2133 case OPT_minsert_sched_nops_:
2134 rs6000_sched_insert_nops_str = arg;
2135 break;
2136
2137 case OPT_mminimal_toc:
2138 if (value == 1)
2139 {
d2894ab5
DE
2140 TARGET_NO_FP_IN_TOC = 0;
2141 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2142 }
2143 break;
2144
2145 case OPT_mpower:
2146 if (value == 1)
c2dba4ab
AH
2147 {
2148 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2149 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2150 }
78f5898b
AH
2151 break;
2152
2153 case OPT_mpower2:
2154 if (value == 1)
c2dba4ab
AH
2155 {
2156 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2157 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2158 }
78f5898b
AH
2159 break;
2160
2161 case OPT_mpowerpc_gpopt:
2162 case OPT_mpowerpc_gfxopt:
2163 if (value == 1)
c2dba4ab
AH
2164 {
2165 target_flags |= MASK_POWERPC;
2166 target_flags_explicit |= MASK_POWERPC;
2167 }
78f5898b
AH
2168 break;
2169
df01da37
DE
2170 case OPT_maix_struct_return:
2171 case OPT_msvr4_struct_return:
2172 rs6000_explicit_options.aix_struct_ret = true;
2173 break;
2174
78f5898b 2175 case OPT_mvrsave_:
a2db2771 2176 rs6000_explicit_options.vrsave = true;
78f5898b
AH
2177 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2178 break;
78f5898b
AH
2179
2180 case OPT_misel_:
2181 rs6000_explicit_options.isel = true;
2182 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2183 break;
2184
2185 case OPT_mspe_:
2186 rs6000_explicit_options.spe = true;
2187 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2188 break;
2189
2190 case OPT_mdebug_:
2191 rs6000_debug_name = arg;
2192 break;
2193
2194#ifdef TARGET_USES_SYSV4_OPT
2195 case OPT_mcall_:
2196 rs6000_abi_name = arg;
2197 break;
2198
2199 case OPT_msdata_:
2200 rs6000_sdata_name = arg;
2201 break;
2202
2203 case OPT_mtls_size_:
2204 rs6000_tls_size_string = arg;
2205 break;
2206
2207 case OPT_mrelocatable:
2208 if (value == 1)
c2dba4ab 2209 {
e0bf274f
AM
2210 target_flags |= MASK_MINIMAL_TOC;
2211 target_flags_explicit |= MASK_MINIMAL_TOC;
2212 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2213 }
78f5898b
AH
2214 break;
2215
2216 case OPT_mrelocatable_lib:
2217 if (value == 1)
c2dba4ab 2218 {
e0bf274f
AM
2219 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2220 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2221 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2222 }
78f5898b 2223 else
c2dba4ab
AH
2224 {
2225 target_flags &= ~MASK_RELOCATABLE;
2226 target_flags_explicit |= MASK_RELOCATABLE;
2227 }
78f5898b
AH
2228 break;
2229#endif
2230
2231 case OPT_mabi_:
78f5898b
AH
2232 if (!strcmp (arg, "altivec"))
2233 {
a2db2771 2234 rs6000_explicit_options.altivec_abi = true;
78f5898b 2235 rs6000_altivec_abi = 1;
a2db2771
JJ
2236
2237 /* Enabling the AltiVec ABI turns off the SPE ABI. */
78f5898b
AH
2238 rs6000_spe_abi = 0;
2239 }
2240 else if (! strcmp (arg, "no-altivec"))
d3603e8c 2241 {
a2db2771 2242 rs6000_explicit_options.altivec_abi = true;
d3603e8c
AM
2243 rs6000_altivec_abi = 0;
2244 }
78f5898b
AH
2245 else if (! strcmp (arg, "spe"))
2246 {
a2db2771 2247 rs6000_explicit_options.spe_abi = true;
78f5898b
AH
2248 rs6000_spe_abi = 1;
2249 rs6000_altivec_abi = 0;
2250 if (!TARGET_SPE_ABI)
2251 error ("not configured for ABI: '%s'", arg);
2252 }
2253 else if (! strcmp (arg, "no-spe"))
d3603e8c 2254 {
a2db2771 2255 rs6000_explicit_options.spe_abi = true;
d3603e8c
AM
2256 rs6000_spe_abi = 0;
2257 }
78f5898b
AH
2258
2259 /* These are here for testing during development only; please do not
2260 document them in the manual. */
2261 else if (! strcmp (arg, "d64"))
2262 {
2263 rs6000_darwin64_abi = 1;
2264 warning (0, "Using darwin64 ABI");
2265 }
2266 else if (! strcmp (arg, "d32"))
2267 {
2268 rs6000_darwin64_abi = 0;
2269 warning (0, "Using old darwin ABI");
2270 }
2271
602ea4d3
JJ
2272 else if (! strcmp (arg, "ibmlongdouble"))
2273 {
d3603e8c 2274 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2275 rs6000_ieeequad = 0;
2276 warning (0, "Using IBM extended precision long double");
2277 }
2278 else if (! strcmp (arg, "ieeelongdouble"))
2279 {
d3603e8c 2280 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2281 rs6000_ieeequad = 1;
2282 warning (0, "Using IEEE extended precision long double");
2283 }
2284
78f5898b
AH
2285 else
2286 {
2287 error ("unknown ABI specified: '%s'", arg);
2288 return false;
2289 }
2290 break;
2291
2292 case OPT_mcpu_:
2293 rs6000_select[1].string = arg;
2294 break;
2295
2296 case OPT_mtune_:
2297 rs6000_select[2].string = arg;
2298 break;
2299
2300 case OPT_mtraceback_:
2301 rs6000_traceback_name = arg;
2302 break;
2303
2304 case OPT_mfloat_gprs_:
2305 rs6000_explicit_options.float_gprs = true;
2306 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2307 rs6000_float_gprs = 1;
2308 else if (! strcmp (arg, "double"))
2309 rs6000_float_gprs = 2;
2310 else if (! strcmp (arg, "no"))
2311 rs6000_float_gprs = 0;
2312 else
2313 {
2314 error ("invalid option for -mfloat-gprs: '%s'", arg);
2315 return false;
2316 }
2317 break;
2318
2319 case OPT_mlong_double_:
2320 rs6000_explicit_options.long_double = true;
2321 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2322 if (value != 64 && value != 128)
2323 {
2324 error ("Unknown switch -mlong-double-%s", arg);
2325 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2326 return false;
2327 }
2328 else
2329 rs6000_long_double_type_size = value;
2330 break;
2331
2332 case OPT_msched_costly_dep_:
2333 rs6000_sched_costly_dep_str = arg;
2334 break;
2335
2336 case OPT_malign_:
2337 rs6000_explicit_options.alignment = true;
2338 if (! strcmp (arg, "power"))
2339 {
2340 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2341 some C library functions, so warn about it. The flag may be
2342 useful for performance studies from time to time though, so
2343 don't disable it entirely. */
2344 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2345 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2346 " it is incompatible with the installed C and C++ libraries");
2347 rs6000_alignment_flags = MASK_ALIGN_POWER;
2348 }
2349 else if (! strcmp (arg, "natural"))
2350 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2351 else
2352 {
2353 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2354 return false;
2355 }
2356 break;
2357 }
2358 return true;
2359}
3cfa4909
MM
2360\f
2361/* Do anything needed at the start of the asm file. */
2362
1bc7c5b6 2363static void
863d938c 2364rs6000_file_start (void)
3cfa4909 2365{
c4d38ccb 2366 size_t i;
3cfa4909 2367 char buffer[80];
d330fd93 2368 const char *start = buffer;
3cfa4909 2369 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2370 const char *default_cpu = TARGET_CPU_DEFAULT;
2371 FILE *file = asm_out_file;
2372
2373 default_file_start ();
2374
2375#ifdef TARGET_BI_ARCH
2376 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2377 default_cpu = 0;
2378#endif
3cfa4909
MM
2379
2380 if (flag_verbose_asm)
2381 {
2382 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2383 rs6000_select[0].string = default_cpu;
2384
b6a1cbae 2385 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2386 {
2387 ptr = &rs6000_select[i];
2388 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2389 {
2390 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2391 start = "";
2392 }
2393 }
2394
9c6b4ed9 2395 if (PPC405_ERRATUM77)
b0bfee6e 2396 {
9c6b4ed9 2397 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2398 start = "";
2399 }
b0bfee6e 2400
b91da81f 2401#ifdef USING_ELFOS_H
3cfa4909
MM
2402 switch (rs6000_sdata)
2403 {
2404 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2405 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2406 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2407 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2408 }
2409
2410 if (rs6000_sdata && g_switch_value)
2411 {
307b599c
MK
2412 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2413 g_switch_value);
3cfa4909
MM
2414 start = "";
2415 }
2416#endif
2417
2418 if (*start == '\0')
949ea356 2419 putc ('\n', file);
3cfa4909 2420 }
b723e82f 2421
e51917ae
JM
2422#ifdef HAVE_AS_GNU_ATTRIBUTE
2423 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2424 {
2425 fprintf (file, "\t.gnu_attribute 4, %d\n",
2426 (TARGET_HARD_FLOAT && TARGET_FPRS) ? 1 : 2);
2427 fprintf (file, "\t.gnu_attribute 8, %d\n",
2428 (TARGET_ALTIVEC_ABI ? 2
2429 : TARGET_SPE_ABI ? 3
2430 : 1));
2431 }
e51917ae
JM
2432#endif
2433
b723e82f
JJ
2434 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2435 {
d6b5193b
RS
2436 switch_to_section (toc_section);
2437 switch_to_section (text_section);
b723e82f 2438 }
3cfa4909 2439}
c4e18b1c 2440
5248c961 2441\f
a0ab749a 2442/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2443
2444int
863d938c 2445direct_return (void)
9878760c 2446{
4697a36c
MM
2447 if (reload_completed)
2448 {
2449 rs6000_stack_t *info = rs6000_stack_info ();
2450
2451 if (info->first_gp_reg_save == 32
2452 && info->first_fp_reg_save == 64
00b960c7 2453 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2454 && ! info->lr_save_p
2455 && ! info->cr_save_p
00b960c7 2456 && info->vrsave_mask == 0
c81fc13e 2457 && ! info->push_p)
4697a36c
MM
2458 return 1;
2459 }
2460
2461 return 0;
9878760c
RK
2462}
2463
4e74d8ec
MM
2464/* Return the number of instructions it takes to form a constant in an
2465 integer register. */
2466
48d72335 2467int
a2369ed3 2468num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2469{
2470 /* signed constant loadable with {cal|addi} */
547b216d 2471 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2472 return 1;
2473
4e74d8ec 2474 /* constant loadable with {cau|addis} */
547b216d
DE
2475 else if ((value & 0xffff) == 0
2476 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2477 return 1;
2478
5f59ecb7 2479#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2480 else if (TARGET_POWERPC64)
4e74d8ec 2481 {
a65c591c
DE
2482 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2483 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2484
a65c591c 2485 if (high == 0 || high == -1)
4e74d8ec
MM
2486 return 2;
2487
a65c591c 2488 high >>= 1;
4e74d8ec 2489
a65c591c 2490 if (low == 0)
4e74d8ec 2491 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2492 else
2493 return (num_insns_constant_wide (high)
e396202a 2494 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2495 }
2496#endif
2497
2498 else
2499 return 2;
2500}
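/* Worked examples (not part of the original source), assuming a 64-bit
   HOST_WIDE_INT:

     num_insns_constant_wide (0x7fff)     == 1   (a single addi/li)
     num_insns_constant_wide (0x12340000) == 1   (a single addis/lis)
     num_insns_constant_wide (0x12345678) == 2   (a two-insn lis/ori style
                                                  sequence)

   On TARGET_POWERPC64 a full 64-bit constant is built from its high and
   low parts, costing the sum of the two parts plus one extra instruction
   to combine them.  */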
2501
2502int
a2369ed3 2503num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2504{
37409796 2505 HOST_WIDE_INT low, high;
bb8df8a6 2506
37409796 2507 switch (GET_CODE (op))
0d30d435 2508 {
37409796 2509 case CONST_INT:
0d30d435 2510#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2511 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2512 && mask64_operand (op, mode))
c4ad648e 2513 return 2;
0d30d435
DE
2514 else
2515#endif
2516 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2517
37409796 2518 case CONST_DOUBLE:
e41b2a33 2519 if (mode == SFmode || mode == SDmode)
37409796
NS
2520 {
2521 long l;
2522 REAL_VALUE_TYPE rv;
bb8df8a6 2523
37409796 2524 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
e41b2a33
PB
2525 if (DECIMAL_FLOAT_MODE_P (mode))
2526 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
2527 else
2528 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
37409796
NS
2529 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2530 }
a260abc9 2531
37409796
NS
2532 if (mode == VOIDmode || mode == DImode)
2533 {
2534 high = CONST_DOUBLE_HIGH (op);
2535 low = CONST_DOUBLE_LOW (op);
2536 }
2537 else
2538 {
2539 long l[2];
2540 REAL_VALUE_TYPE rv;
bb8df8a6 2541
37409796 2542 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2543 if (DECIMAL_FLOAT_MODE_P (mode))
2544 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2545 else
2546 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2547 high = l[WORDS_BIG_ENDIAN == 0];
2548 low = l[WORDS_BIG_ENDIAN != 0];
2549 }
47ad8c61 2550
37409796
NS
2551 if (TARGET_32BIT)
2552 return (num_insns_constant_wide (low)
2553 + num_insns_constant_wide (high));
2554 else
2555 {
2556 if ((high == 0 && low >= 0)
2557 || (high == -1 && low < 0))
2558 return num_insns_constant_wide (low);
bb8df8a6 2559
1990cd79 2560 else if (mask64_operand (op, mode))
37409796 2561 return 2;
bb8df8a6 2562
37409796
NS
2563 else if (low == 0)
2564 return num_insns_constant_wide (high) + 1;
bb8df8a6 2565
37409796
NS
2566 else
2567 return (num_insns_constant_wide (high)
2568 + num_insns_constant_wide (low) + 1);
2569 }
bb8df8a6 2570
37409796
NS
2571 default:
2572 gcc_unreachable ();
4e74d8ec 2573 }
4e74d8ec
MM
2574}
2575
0972012c
RS
2576/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2577 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2578 corresponding element of the vector, but for V4SFmode and V2SFmode,
2579 the corresponding "float" is interpreted as an SImode integer. */
2580
2581static HOST_WIDE_INT
2582const_vector_elt_as_int (rtx op, unsigned int elt)
2583{
2584 rtx tmp = CONST_VECTOR_ELT (op, elt);
2585 if (GET_MODE (op) == V4SFmode
2586 || GET_MODE (op) == V2SFmode)
2587 tmp = gen_lowpart (SImode, tmp);
2588 return INTVAL (tmp);
2589}
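/* Illustrative example (not part of the original source): for a V4SFmode
   CONST_VECTOR whose elements are all 1.0f, const_vector_elt_as_int
   returns the IEEE single-precision bit pattern 0x3f800000 for each
   element, which is the value the vspltis* recognizers below operate on. */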
452a7d36 2590
77ccdfed 2591/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2592 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2593 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2594 all items are set to the same value and contain COPIES replicas of the
2595 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2596 operand and the others are set to the value of the operand's msb. */
2597
2598static bool
2599vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2600{
66180ff3
PB
2601 enum machine_mode mode = GET_MODE (op);
2602 enum machine_mode inner = GET_MODE_INNER (mode);
2603
2604 unsigned i;
2605 unsigned nunits = GET_MODE_NUNITS (mode);
2606 unsigned bitsize = GET_MODE_BITSIZE (inner);
2607 unsigned mask = GET_MODE_MASK (inner);
2608
0972012c 2609 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2610 HOST_WIDE_INT splat_val = val;
2611 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2612
2613 /* Construct the value to be splatted, if possible. If not, return 0. */
2614 for (i = 2; i <= copies; i *= 2)
452a7d36 2615 {
66180ff3
PB
2616 HOST_WIDE_INT small_val;
2617 bitsize /= 2;
2618 small_val = splat_val >> bitsize;
2619 mask >>= bitsize;
2620 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2621 return false;
2622 splat_val = small_val;
2623 }
c4ad648e 2624
66180ff3
PB
2625 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2626 if (EASY_VECTOR_15 (splat_val))
2627 ;
2628
2629 /* Also check if we can splat, and then add the result to itself. Do so if
2630 the value is positive, of if the splat instruction is using OP's mode;
2631 for splat_val < 0, the splat and the add should use the same mode. */
2632 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2633 && (splat_val >= 0 || (step == 1 && copies == 1)))
2634 ;
2635
2636 else
2637 return false;
2638
2639 /* Check if VAL is present in every STEP-th element, and the
2640 other elements are filled with its most significant bit. */
2641 for (i = 0; i < nunits - 1; ++i)
2642 {
2643 HOST_WIDE_INT desired_val;
2644 if (((i + 1) & (step - 1)) == 0)
2645 desired_val = val;
2646 else
2647 desired_val = msb_val;
2648
0972012c 2649 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2650 return false;
452a7d36 2651 }
66180ff3
PB
2652
2653 return true;
452a7d36
HP
2654}
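/* Worked example (not part of the original source): consider a V4SImode
   constant whose four elements are all 0x00050005, checked with step == 1
   and copies == 2.  The construction loop halves the element width: the low
   halfword 0x0005 replicated twice reproduces 0x00050005, so splat_val
   becomes 5, which satisfies EASY_VECTOR_15.  Every element then matches
   the splatted value, so the constant is recognized as a vspltish of 5
   even though it is written in V4SImode.  */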
2655
69ef87e2 2656
77ccdfed 2657/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2658 with a vspltisb, vspltish or vspltisw. */
2659
2660bool
2661easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2662{
66180ff3 2663 unsigned step, copies;
d744e06e 2664
66180ff3
PB
2665 if (mode == VOIDmode)
2666 mode = GET_MODE (op);
2667 else if (mode != GET_MODE (op))
2668 return false;
d744e06e 2669
66180ff3
PB
2670 /* Start with a vspltisw. */
2671 step = GET_MODE_NUNITS (mode) / 4;
2672 copies = 1;
2673
2674 if (vspltis_constant (op, step, copies))
2675 return true;
2676
2677 /* Then try with a vspltish. */
2678 if (step == 1)
2679 copies <<= 1;
2680 else
2681 step >>= 1;
2682
2683 if (vspltis_constant (op, step, copies))
2684 return true;
2685
2686 /* And finally a vspltisb. */
2687 if (step == 1)
2688 copies <<= 1;
2689 else
2690 step >>= 1;
2691
2692 if (vspltis_constant (op, step, copies))
2693 return true;
2694
2695 return false;
d744e06e
AH
2696}
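/* Usage sketch (not part of the original source): for a V4SImode operand
   the three attempts above correspond to

     vspltis_constant (op, 1, 1)   -- vspltisw
     vspltis_constant (op, 1, 2)   -- vspltish
     vspltis_constant (op, 1, 4)   -- vspltisb

   while for V16QImode they are (4, 1), (2, 1) and (1, 1) respectively:
   either the splat operand is replicated COPIES times inside each element,
   or only every STEP-th element carries it and the rest hold its sign.  */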
2697
66180ff3
PB
2698/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2699 result is OP. Abort if it is not possible. */
d744e06e 2700
f676971a 2701rtx
66180ff3 2702gen_easy_altivec_constant (rtx op)
452a7d36 2703{
66180ff3
PB
2704 enum machine_mode mode = GET_MODE (op);
2705 int nunits = GET_MODE_NUNITS (mode);
2706 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2707 unsigned step = nunits / 4;
2708 unsigned copies = 1;
2709
2710 /* Start with a vspltisw. */
2711 if (vspltis_constant (op, step, copies))
2712 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2713
2714 /* Then try with a vspltish. */
2715 if (step == 1)
2716 copies <<= 1;
2717 else
2718 step >>= 1;
2719
2720 if (vspltis_constant (op, step, copies))
2721 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2722
2723 /* And finally a vspltisb. */
2724 if (step == 1)
2725 copies <<= 1;
2726 else
2727 step >>= 1;
2728
2729 if (vspltis_constant (op, step, copies))
2730 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2731
2732 gcc_unreachable ();
d744e06e
AH
2733}
2734
2735const char *
a2369ed3 2736output_vec_const_move (rtx *operands)
d744e06e
AH
2737{
2738 int cst, cst2;
2739 enum machine_mode mode;
2740 rtx dest, vec;
2741
2742 dest = operands[0];
2743 vec = operands[1];
d744e06e 2744 mode = GET_MODE (dest);
69ef87e2 2745
d744e06e
AH
2746 if (TARGET_ALTIVEC)
2747 {
66180ff3 2748 rtx splat_vec;
d744e06e
AH
2749 if (zero_constant (vec, mode))
2750 return "vxor %0,%0,%0";
37409796 2751
66180ff3
PB
2752 splat_vec = gen_easy_altivec_constant (vec);
2753 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2754 operands[1] = XEXP (splat_vec, 0);
2755 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2756 return "#";
bb8df8a6 2757
66180ff3 2758 switch (GET_MODE (splat_vec))
98ef3137 2759 {
37409796 2760 case V4SImode:
66180ff3 2761 return "vspltisw %0,%1";
c4ad648e 2762
37409796 2763 case V8HImode:
66180ff3 2764 return "vspltish %0,%1";
c4ad648e 2765
37409796 2766 case V16QImode:
66180ff3 2767 return "vspltisb %0,%1";
bb8df8a6 2768
37409796
NS
2769 default:
2770 gcc_unreachable ();
98ef3137 2771 }
69ef87e2
AH
2772 }
2773
37409796 2774 gcc_assert (TARGET_SPE);
bb8df8a6 2775
37409796
NS
2776 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2777 pattern of V1DI, V4HI, and V2SF.
2778
2779 FIXME: We should probably return # and add post reload
2780 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
2781 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2782 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2783 operands[1] = CONST_VECTOR_ELT (vec, 0);
2784 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2785 if (cst == cst2)
2786 return "li %0,%1\n\tevmergelo %0,%0,%0";
2787 else
2788 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2789}
2790
f5027409
RE
2791/* Initialize the paired-single vector TARGET to VALS. */
2792
2793void
2794paired_expand_vector_init (rtx target, rtx vals)
2795{
2796 enum machine_mode mode = GET_MODE (target);
2797 int n_elts = GET_MODE_NUNITS (mode);
2798 int n_var = 0;
2799 rtx x, new, tmp, constant_op, op1, op2;
2800 int i;
2801
2802 for (i = 0; i < n_elts; ++i)
2803 {
2804 x = XVECEXP (vals, 0, i);
2805 if (!CONSTANT_P (x))
2806 ++n_var;
2807 }
2808 if (n_var == 0)
2809 {
2810 /* Load from constant pool. */
2811 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2812 return;
2813 }
2814
2815 if (n_var == 2)
2816 {
2817 /* The vector is initialized only with non-constants. */
2818 new = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
2819 XVECEXP (vals, 0, 1));
2820
2821 emit_move_insn (target, new);
2822 return;
2823 }
2824
2825 /* One field is non-constant and the other one is a constant. Load the
2826 constant from the constant pool and use the ps_merge instruction to
2827 construct the whole vector. */
2828 op1 = XVECEXP (vals, 0, 0);
2829 op2 = XVECEXP (vals, 0, 1);
2830
2831 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
2832
2833 tmp = gen_reg_rtx (GET_MODE (constant_op));
2834 emit_move_insn (tmp, constant_op);
2835
2836 if (CONSTANT_P (op1))
2837 new = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
2838 else
2839 new = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
2840
2841 emit_move_insn (target, new);
2842}
2843
e2e95f45
RE
2844void
2845paired_expand_vector_move (rtx operands[])
2846{
2847 rtx op0 = operands[0], op1 = operands[1];
2848
2849 emit_move_insn (op0, op1);
2850}
2851
2852/* Emit vector compare for code RCODE. DEST is destination, OP1 and
2853 OP2 are two VEC_COND_EXPR operands, CC_OP0 and CC_OP1 are the two
2854 operands for the relation operation COND. This is a recursive
2855 function. */
2856
2857static void
2858paired_emit_vector_compare (enum rtx_code rcode,
2859 rtx dest, rtx op0, rtx op1,
2860 rtx cc_op0, rtx cc_op1)
2861{
2862 rtx tmp = gen_reg_rtx (V2SFmode);
2863 rtx tmp1, max, min, equal_zero;
2864
2865 gcc_assert (TARGET_PAIRED_FLOAT);
2866 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
2867
2868 switch (rcode)
2869 {
2870 case LT:
2871 case LTU:
2872 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2873 return;
2874 case GE:
2875 case GEU:
2876 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2877 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
2878 return;
2879 case LE:
2880 case LEU:
2881 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
2882 return;
2883 case GT:
2884 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2885 return;
2886 case EQ:
2887 tmp1 = gen_reg_rtx (V2SFmode);
2888 max = gen_reg_rtx (V2SFmode);
2889 min = gen_reg_rtx (V2SFmode);
2890 equal_zero = gen_reg_rtx (V2SFmode);
2891
2892 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2893 emit_insn (gen_selv2sf4
2894 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2895 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
2896 emit_insn (gen_selv2sf4
2897 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2898 emit_insn (gen_subv2sf3 (tmp1, min, max));
2899 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
2900 return;
2901 case NE:
2902 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
2903 return;
2904 case UNLE:
2905 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2906 return;
2907 case UNLT:
2908 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
2909 return;
2910 case UNGE:
2911 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2912 return;
2913 case UNGT:
2914 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
2915 return;
2916 default:
2917 gcc_unreachable ();
2918 }
2919
2920 return;
2921}
2922
2923/* Emit vector conditional expression.
2924 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
2925 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
2926
2927int
2928paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
2929 rtx cond, rtx cc_op0, rtx cc_op1)
2930{
2931 enum rtx_code rcode = GET_CODE (cond);
2932
2933 if (!TARGET_PAIRED_FLOAT)
2934 return 0;
2935
2936 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
2937
2938 return 1;
2939}
2940
7a4eca66
DE
2941/* Initialize vector TARGET to VALS. */
2942
2943void
2944rs6000_expand_vector_init (rtx target, rtx vals)
2945{
2946 enum machine_mode mode = GET_MODE (target);
2947 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2948 int n_elts = GET_MODE_NUNITS (mode);
2949 int n_var = 0, one_var = -1;
2950 bool all_same = true, all_const_zero = true;
2951 rtx x, mem;
2952 int i;
2953
2954 for (i = 0; i < n_elts; ++i)
2955 {
2956 x = XVECEXP (vals, 0, i);
2957 if (!CONSTANT_P (x))
2958 ++n_var, one_var = i;
2959 else if (x != CONST0_RTX (inner_mode))
2960 all_const_zero = false;
2961
2962 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2963 all_same = false;
2964 }
2965
2966 if (n_var == 0)
2967 {
2968 if (mode != V4SFmode && all_const_zero)
2969 {
2970 /* Zero register. */
2971 emit_insn (gen_rtx_SET (VOIDmode, target,
2972 gen_rtx_XOR (mode, target, target)));
2973 return;
2974 }
66180ff3 2975 else if (mode != V4SFmode && easy_vector_constant (vals, mode))
7a4eca66
DE
2976 {
2977 /* Splat immediate. */
66180ff3 2978 emit_insn (gen_rtx_SET (VOIDmode, target, vals));
7a4eca66
DE
2979 return;
2980 }
2981 else if (all_same)
2982 ; /* Splat vector element. */
2983 else
2984 {
2985 /* Load from constant pool. */
2986 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2987 return;
2988 }
2989 }
2990
2991 /* Store value to stack temp. Load vector element. Splat. */
2992 if (all_same)
2993 {
2994 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2995 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2996 XVECEXP (vals, 0, 0));
2997 x = gen_rtx_UNSPEC (VOIDmode,
2998 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2999 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3000 gen_rtvec (2,
3001 gen_rtx_SET (VOIDmode,
3002 target, mem),
3003 x)));
3004 x = gen_rtx_VEC_SELECT (inner_mode, target,
3005 gen_rtx_PARALLEL (VOIDmode,
3006 gen_rtvec (1, const0_rtx)));
3007 emit_insn (gen_rtx_SET (VOIDmode, target,
3008 gen_rtx_VEC_DUPLICATE (mode, x)));
3009 return;
3010 }
3011
3012 /* One field is non-constant. Load constant then overwrite
3013 varying field. */
3014 if (n_var == 1)
3015 {
3016 rtx copy = copy_rtx (vals);
3017
57b51d4d 3018 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
3019 varying element. */
3020 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
3021 rs6000_expand_vector_init (target, copy);
3022
3023 /* Insert variable. */
3024 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
3025 return;
3026 }
3027
3028 /* Construct the vector in memory one field at a time
3029 and load the whole vector. */
3030 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3031 for (i = 0; i < n_elts; i++)
3032 emit_move_insn (adjust_address_nv (mem, inner_mode,
3033 i * GET_MODE_SIZE (inner_mode)),
3034 XVECEXP (vals, 0, i));
3035 emit_move_insn (target, mem);
3036}
3037
3038/* Set field ELT of TARGET to VAL. */
3039
3040void
3041rs6000_expand_vector_set (rtx target, rtx val, int elt)
3042{
3043 enum machine_mode mode = GET_MODE (target);
3044 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3045 rtx reg = gen_reg_rtx (mode);
3046 rtx mask, mem, x;
3047 int width = GET_MODE_SIZE (inner_mode);
3048 int i;
3049
3050 /* Load single variable value. */
3051 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3052 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
3053 x = gen_rtx_UNSPEC (VOIDmode,
3054 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3055 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3056 gen_rtvec (2,
3057 gen_rtx_SET (VOIDmode,
3058 reg, mem),
3059 x)));
3060
3061 /* Linear sequence. */
3062 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
3063 for (i = 0; i < 16; ++i)
3064 XVECEXP (mask, 0, i) = GEN_INT (i);
3065
3066 /* Set permute mask to insert element into target. */
3067 for (i = 0; i < width; ++i)
3068 XVECEXP (mask, 0, elt*width + i)
3069 = GEN_INT (i + 0x10);
3070 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3071 x = gen_rtx_UNSPEC (mode,
3072 gen_rtvec (3, target, reg,
3073 force_reg (V16QImode, x)),
3074 UNSPEC_VPERM);
3075 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3076}
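/* Worked example (not part of the original source): for a V4SImode target
   and ELT == 1, width is 4, so the permute mask built above is

     { 0, 1, 2, 3, 0x10, 0x11, 0x12, 0x13, 8, 9, 10, 11, 12, 13, 14, 15 }

   i.e. bytes 4..7 of the result are taken from the first four bytes of the
   register holding the new value (the second vperm input), while every
   other byte is copied unchanged from TARGET.  */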
3077
3078/* Extract field ELT from VEC into TARGET. */
3079
3080void
3081rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3082{
3083 enum machine_mode mode = GET_MODE (vec);
3084 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3085 rtx mem, x;
3086
3087 /* Allocate mode-sized buffer. */
3088 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3089
3090 /* Add offset to field within buffer matching vector element. */
3091 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3092
3093 /* Store single field into mode-sized buffer. */
3094 x = gen_rtx_UNSPEC (VOIDmode,
3095 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3096 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3097 gen_rtvec (2,
3098 gen_rtx_SET (VOIDmode,
3099 mem, vec),
3100 x)));
3101 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3102}
3103
0ba1b2ff
AM
3104/* Generates shifts and masks for a pair of rldicl or rldicr insns to
3105 implement ANDing by the mask IN. */
3106void
a2369ed3 3107build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
3108{
3109#if HOST_BITS_PER_WIDE_INT >= 64
3110 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3111 int shift;
3112
37409796 3113 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
3114
3115 c = INTVAL (in);
3116 if (c & 1)
3117 {
3118 /* Assume c initially something like 0x00fff000000fffff. The idea
3119 is to rotate the word so that the middle ^^^^^^ group of zeros
3120 is at the MS end and can be cleared with an rldicl mask. We then
3121 rotate back and clear off the MS ^^ group of zeros with a
3122 second rldicl. */
3123 c = ~c; /* c == 0xff000ffffff00000 */
3124 lsb = c & -c; /* lsb == 0x0000000000100000 */
3125 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3126 c = ~c; /* c == 0x00fff000000fffff */
3127 c &= -lsb; /* c == 0x00fff00000000000 */
3128 lsb = c & -c; /* lsb == 0x0000100000000000 */
3129 c = ~c; /* c == 0xff000fffffffffff */
3130 c &= -lsb; /* c == 0xff00000000000000 */
3131 shift = 0;
3132 while ((lsb >>= 1) != 0)
3133 shift++; /* shift == 44 on exit from loop */
3134 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3135 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3136 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
3137 }
3138 else
0ba1b2ff
AM
3139 {
3140 /* Assume c initially something like 0xff000f0000000000. The idea
3141 is to rotate the word so that the ^^^ middle group of zeros
3142 is at the LS end and can be cleared with an rldicr mask. We then
3143 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3144 a second rldicr. */
3145 lsb = c & -c; /* lsb == 0x0000010000000000 */
3146 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3147 c = ~c; /* c == 0x00fff0ffffffffff */
3148 c &= -lsb; /* c == 0x00fff00000000000 */
3149 lsb = c & -c; /* lsb == 0x0000100000000000 */
3150 c = ~c; /* c == 0xff000fffffffffff */
3151 c &= -lsb; /* c == 0xff00000000000000 */
3152 shift = 0;
3153 while ((lsb >>= 1) != 0)
3154 shift++; /* shift == 44 on exit from loop */
3155 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3156 m1 >>= shift; /* m1 == 0x0000000000000fff */
3157 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3158 }
3159
3160 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3161 masks will be all 1's. We are guaranteed more than one transition. */
3162 out[0] = GEN_INT (64 - shift);
3163 out[1] = GEN_INT (m1);
3164 out[2] = GEN_INT (shift);
3165 out[3] = GEN_INT (m2);
3166#else
045572c7
GK
3167 (void)in;
3168 (void)out;
37409796 3169 gcc_unreachable ();
0ba1b2ff 3170#endif
a260abc9
DE
3171}
3172
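/* Illustrative host-side sketch (an assumption for exposition, not code
   used by GCC) of how the four operands produced by
   build_mask64_2_operands combine: rotate by OUT[0], AND with OUT[1],
   rotate by OUT[2], AND with OUT[3] must reproduce the original AND.  */

static unsigned long long
rotl64_sketch (unsigned long long v, int n)
{
  /* Rotate left by N for 0 <= N < 64; N == 0 must avoid the shift by 64.  */
  return n == 0 ? v : (v << n) | (v >> (64 - n));
}

static int
mask64_2_combines_sketch (unsigned long long x, unsigned long long mask,
			  int shift, unsigned long long m1,
			  unsigned long long m2)
{
  /* First insn: bring the inner group of zeros to one end and clear it
     with M1; the second insn rotates back and clears the outer group
     with M2.  SHIFT, M1 and M2 are as computed above.  */
  unsigned long long t = rotl64_sketch (x, 64 - shift) & m1;
  return (rotl64_sketch (t, shift) & m2) == (x & mask);
}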
54b695e7 3173/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
3174
3175bool
54b695e7
AH
3176invalid_e500_subreg (rtx op, enum machine_mode mode)
3177{
61c76239
JM
3178 if (TARGET_E500_DOUBLE)
3179 {
17caeff2
JM
3180 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
3181 subreg:TI and reg:TF. */
61c76239 3182 if (GET_CODE (op) == SUBREG
17caeff2 3183 && (mode == SImode || mode == DImode || mode == TImode)
61c76239 3184 && REG_P (SUBREG_REG (op))
17caeff2 3185 && (GET_MODE (SUBREG_REG (op)) == DFmode
4d4447b5
PB
3186 || GET_MODE (SUBREG_REG (op)) == TFmode
3187 || GET_MODE (SUBREG_REG (op)) == DDmode
3188 || GET_MODE (SUBREG_REG (op)) == TDmode))
61c76239
JM
3189 return true;
3190
17caeff2
JM
3191 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3192 reg:TI. */
61c76239 3193 if (GET_CODE (op) == SUBREG
4d4447b5
PB
3194 && (mode == DFmode || mode == TFmode
3195 || mode == DDmode || mode == TDmode)
61c76239 3196 && REG_P (SUBREG_REG (op))
17caeff2
JM
3197 && (GET_MODE (SUBREG_REG (op)) == DImode
3198 || GET_MODE (SUBREG_REG (op)) == TImode))
61c76239
JM
3199 return true;
3200 }
54b695e7 3201
61c76239
JM
3202 if (TARGET_SPE
3203 && GET_CODE (op) == SUBREG
3204 && mode == SImode
54b695e7 3205 && REG_P (SUBREG_REG (op))
14502dad 3206 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
3207 return true;
3208
3209 return false;
3210}
3211
58182de3 3212/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
 3213 field is an FP double, while the FP fields themselves remain word aligned. */
3214
19d66194 3215unsigned int
fa5b0972
AM
3216rs6000_special_round_type_align (tree type, unsigned int computed,
3217 unsigned int specified)
95727fb8 3218{
fa5b0972 3219 unsigned int align = MAX (computed, specified);
95727fb8 3220 tree field = TYPE_FIELDS (type);
95727fb8 3221
bb8df8a6 3222 /* Skip all non-field decls. */
85962ac8 3223 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
3224 field = TREE_CHAIN (field);
3225
fa5b0972
AM
3226 if (field != NULL && field != type)
3227 {
3228 type = TREE_TYPE (field);
3229 while (TREE_CODE (type) == ARRAY_TYPE)
3230 type = TREE_TYPE (type);
3231
3232 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3233 align = MAX (align, 64);
3234 }
95727fb8 3235
fa5b0972 3236 return align;
95727fb8
AP
3237}
3238
58182de3
GK
3239/* Darwin increases record alignment to the natural alignment of
3240 the first field. */
3241
3242unsigned int
3243darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3244 unsigned int specified)
3245{
3246 unsigned int align = MAX (computed, specified);
3247
3248 if (TYPE_PACKED (type))
3249 return align;
3250
3251 /* Find the first field, looking down into aggregates. */
3252 do {
3253 tree field = TYPE_FIELDS (type);
 3254 /* Skip all non-field decls. */
3255 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3256 field = TREE_CHAIN (field);
3257 if (! field)
3258 break;
3259 type = TREE_TYPE (field);
3260 while (TREE_CODE (type) == ARRAY_TYPE)
3261 type = TREE_TYPE (type);
3262 } while (AGGREGATE_TYPE_P (type));
3263
3264 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3265 align = MAX (align, TYPE_ALIGN (type));
3266
3267 return align;
3268}
3269
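/* Illustration (assumed 32-bit layouts, not part of the original code) of
   the two rounding rules above:

     struct a { double d; int i; };   first field is a double, so the AIX
                                      rule rounds the record up to
                                      doubleword (64-bit) alignment;
     struct b { int i; double d; };   the leading int leaves the record at
                                      its computed word alignment.

   The Darwin variant instead raises the record to the natural alignment
   of whatever scalar it finds as the first field, looking down into
   nested aggregates.  */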
a4f6c312 3270/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3271
3272int
f676971a 3273small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3274 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3275{
38c1f2d7 3276#if TARGET_ELF
5f59ecb7 3277 rtx sym_ref;
7509c759 3278
d9407988 3279 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3280 return 0;
a54d04b7 3281
f607bc57 3282 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3283 return 0;
3284
2aa42e6e
NF
3285 /* Vector and float memory instructions have a limited offset on the
3286 SPE, so using a vector or float variable directly as an operand is
3287 not useful. */
3288 if (TARGET_SPE
3289 && (SPE_VECTOR_MODE (mode) || FLOAT_MODE_P (mode)))
3290 return 0;
3291
88228c4b
MM
3292 if (GET_CODE (op) == SYMBOL_REF)
3293 sym_ref = op;
3294
3295 else if (GET_CODE (op) != CONST
3296 || GET_CODE (XEXP (op, 0)) != PLUS
3297 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3298 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3299 return 0;
3300
88228c4b 3301 else
dbf55e53
MM
3302 {
3303 rtx sum = XEXP (op, 0);
3304 HOST_WIDE_INT summand;
3305
3306 /* We have to be careful here, because it is the referenced address
c4ad648e 3307 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3308 summand = INTVAL (XEXP (sum, 1));
307b599c 3309 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3310 return 0;
dbf55e53
MM
3311
3312 sym_ref = XEXP (sum, 0);
3313 }
88228c4b 3314
20bfcd69 3315 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3316#else
3317 return 0;
3318#endif
7509c759 3319}
46c07df8 3320
3a1f863f 3321/* Return true if either operand is a general purpose register. */
46c07df8 3322
3a1f863f
DE
3323bool
3324gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3325{
3a1f863f
DE
3326 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3327 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3328}
3329
9ebbca7d 3330\f
4d588c14
RH
3331/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3332
f676971a
EC
3333static int
3334constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 3335{
9390387d 3336 switch (GET_CODE (op))
9ebbca7d
GK
3337 {
3338 case SYMBOL_REF:
c4501e62
JJ
3339 if (RS6000_SYMBOL_REF_TLS_P (op))
3340 return 0;
3341 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
3342 {
3343 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3344 {
3345 *have_sym = 1;
3346 return 1;
3347 }
3348 else
3349 return 0;
3350 }
3351 else if (! strcmp (XSTR (op, 0), toc_label_name))
3352 {
3353 *have_toc = 1;
3354 return 1;
3355 }
3356 else
3357 return 0;
9ebbca7d
GK
3358 case PLUS:
3359 case MINUS:
c1f11548
DE
3360 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3361 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 3362 case CONST:
a4f6c312 3363 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 3364 case CONST_INT:
a4f6c312 3365 return 1;
9ebbca7d 3366 default:
a4f6c312 3367 return 0;
9ebbca7d
GK
3368 }
3369}
3370
4d588c14 3371static bool
a2369ed3 3372constant_pool_expr_p (rtx op)
9ebbca7d
GK
3373{
3374 int have_sym = 0;
3375 int have_toc = 0;
3376 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3377}
3378
48d72335 3379bool
a2369ed3 3380toc_relative_expr_p (rtx op)
9ebbca7d 3381{
4d588c14
RH
3382 int have_sym = 0;
3383 int have_toc = 0;
3384 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3385}
3386
4d588c14 3387bool
a2369ed3 3388legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3389{
3390 return (TARGET_TOC
3391 && GET_CODE (x) == PLUS
3392 && GET_CODE (XEXP (x, 0)) == REG
3393 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3394 && constant_pool_expr_p (XEXP (x, 1)));
3395}
3396
d04b6e6e
EB
3397static bool
3398legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3399{
3400 return (DEFAULT_ABI == ABI_V4
3401 && !flag_pic && !TARGET_TOC
3402 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3403 && small_data_operand (x, mode));
3404}
3405
60cdabab
DE
3406/* SPE offset addressing is limited to 5 bits' worth of double words. */
3407#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
3408
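/* For example, SPE_CONST_OFFSET_OK accepts exactly the double-word
   multiples 0x00, 0x08, ..., 0xf8 (0..248): 0xf8 & ~0xf8 == 0, while a
   misaligned offset such as 4 or an out-of-range one such as 256 leaves
   bits outside 0xf8 set and is rejected.  */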
76d2b81d
DJ
3409bool
3410rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3411{
3412 unsigned HOST_WIDE_INT offset, extra;
3413
3414 if (GET_CODE (x) != PLUS)
3415 return false;
3416 if (GET_CODE (XEXP (x, 0)) != REG)
3417 return false;
3418 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3419 return false;
60cdabab
DE
3420 if (legitimate_constant_pool_address_p (x))
3421 return true;
4d588c14
RH
3422 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3423 return false;
3424
3425 offset = INTVAL (XEXP (x, 1));
3426 extra = 0;
3427 switch (mode)
3428 {
3429 case V16QImode:
3430 case V8HImode:
3431 case V4SFmode:
3432 case V4SImode:
7a4eca66 3433 /* AltiVec vector modes. Only reg+reg addressing is valid and
1a23970d
DE
3434 constant offset zero should not occur due to canonicalization. */
3435 return false;
4d588c14
RH
3436
3437 case V4HImode:
3438 case V2SImode:
3439 case V1DImode:
3440 case V2SFmode:
d42a3bae 3441 /* Paired vector modes. Only reg+reg addressing is valid and
1a23970d 3442 constant offset zero should not occur due to canonicalization. */
d42a3bae 3443 if (TARGET_PAIRED_FLOAT)
1a23970d 3444 return false;
4d588c14
RH
3445 /* SPE vector modes. */
3446 return SPE_CONST_OFFSET_OK (offset);
3447
3448 case DFmode:
7393f7f8 3449 case DDmode:
4d4cbc0e
AH
3450 if (TARGET_E500_DOUBLE)
3451 return SPE_CONST_OFFSET_OK (offset);
3452
4d588c14 3453 case DImode:
54b695e7
AH
3454 /* On e500v2, we may have:
3455
3456 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3457
 3458 This gets addressed with evldd instructions. */
3459 if (TARGET_E500_DOUBLE)
3460 return SPE_CONST_OFFSET_OK (offset);
3461
7393f7f8 3462 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3463 extra = 4;
3464 else if (offset & 3)
3465 return false;
3466 break;
3467
3468 case TFmode:
4d4447b5 3469 case TDmode:
17caeff2
JM
3470 if (TARGET_E500_DOUBLE)
3471 return (SPE_CONST_OFFSET_OK (offset)
3472 && SPE_CONST_OFFSET_OK (offset + 8));
3473
4d588c14 3474 case TImode:
7393f7f8 3475 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3476 extra = 12;
3477 else if (offset & 3)
3478 return false;
3479 else
3480 extra = 8;
3481 break;
3482
3483 default:
3484 break;
3485 }
3486
b1917422
AM
3487 offset += 0x8000;
3488 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3489}
3490
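/* Standalone sketch (illustrative only; the names here are invented) of
   the biased range test at the end of rs6000_legitimate_offset_address_p:
   OFFSET must fit a signed 16-bit D field, and so must OFFSET + EXTRA,
   the displacement of the last word accessed.  */

static int
d_field_offset_ok_sketch (long long offset, unsigned long long extra)
{
  unsigned long long biased = (unsigned long long) offset + 0x8000;
  return biased < 0x10000 && biased + extra < 0x10000;
}

/* E.g. a 32-bit DFmode access (EXTRA == 4) at offset 32764 is rejected,
   since its second word at 32768 no longer fits the D field, while
   offset -32768 is accepted.  */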
6fb5fa3c 3491bool
a2369ed3 3492legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3493{
3494 rtx op0, op1;
3495
3496 if (GET_CODE (x) != PLUS)
3497 return false;
850e8d3d 3498
4d588c14
RH
3499 op0 = XEXP (x, 0);
3500 op1 = XEXP (x, 1);
3501
bf00cc0f 3502 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3503 replaced with proper base and index regs. */
3504 if (!strict
3505 && reload_in_progress
3506 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3507 && REG_P (op1))
3508 return true;
3509
3510 return (REG_P (op0) && REG_P (op1)
3511 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3512 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3513 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3514 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3515}
3516
48d72335 3517inline bool
a2369ed3 3518legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3519{
3520 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3521}
3522
48d72335 3523bool
4c81e946
FJ
3524macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3525{
c4ad648e 3526 if (!TARGET_MACHO || !flag_pic
9390387d 3527 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3528 return false;
3529 x = XEXP (x, 0);
4c81e946
FJ
3530
3531 if (GET_CODE (x) != LO_SUM)
3532 return false;
3533 if (GET_CODE (XEXP (x, 0)) != REG)
3534 return false;
3535 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3536 return false;
3537 x = XEXP (x, 1);
3538
3539 return CONSTANT_P (x);
3540}
3541
4d588c14 3542static bool
a2369ed3 3543legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3544{
3545 if (GET_CODE (x) != LO_SUM)
3546 return false;
3547 if (GET_CODE (XEXP (x, 0)) != REG)
3548 return false;
3549 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3550 return false;
54b695e7 3551 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2 3552 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 3553 || mode == DDmode || mode == TDmode
17caeff2 3554 || mode == DImode))
f82f556d 3555 return false;
4d588c14
RH
3556 x = XEXP (x, 1);
3557
8622e235 3558 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3559 {
a29077da 3560 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3561 return false;
3562 if (TARGET_TOC)
3563 return false;
3564 if (GET_MODE_NUNITS (mode) != 1)
3565 return false;
5e5f01b9 3566 if (GET_MODE_BITSIZE (mode) > 64
3c028f65 3567 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
4d4447b5
PB
3568 && !(TARGET_HARD_FLOAT && TARGET_FPRS
3569 && (mode == DFmode || mode == DDmode))))
4d588c14
RH
3570 return false;
3571
3572 return CONSTANT_P (x);
3573 }
3574
3575 return false;
3576}
3577
3578
9ebbca7d
GK
3579/* Try machine-dependent ways of modifying an illegitimate address
3580 to be legitimate. If we find one, return the new, valid address.
3581 This is used from only one place: `memory_address' in explow.c.
3582
a4f6c312
SS
3583 OLDX is the address as it was before break_out_memory_refs was
3584 called. In some cases it is useful to look at this to decide what
3585 needs to be done.
9ebbca7d 3586
a4f6c312 3587 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3588
a4f6c312
SS
3589 It is always safe for this function to do nothing. It exists to
3590 recognize opportunities to optimize the output.
9ebbca7d
GK
3591
3592 On RS/6000, first check for the sum of a register with a constant
3593 integer that is out of range. If so, generate code to add the
3594 constant with the low-order 16 bits masked to the register and force
3595 this result into another register (this can be done with `cau').
3596 Then generate an address of REG+(CONST&0xffff), allowing for the
3597 possibility of bit 16 being a one.
3598
3599 Then check for the sum of a register and something not constant, try to
3600 load the other things into a register and return the sum. */
4d588c14 3601
9ebbca7d 3602rtx
a2369ed3
DJ
3603rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3604 enum machine_mode mode)
0ac081f6 3605{
c4501e62
JJ
3606 if (GET_CODE (x) == SYMBOL_REF)
3607 {
3608 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3609 if (model != 0)
3610 return rs6000_legitimize_tls_address (x, model);
3611 }
3612
f676971a 3613 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3614 && GET_CODE (XEXP (x, 0)) == REG
3615 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb
JM
3616 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
3617 && !(SPE_VECTOR_MODE (mode)
efc05e3c 3618 || ALTIVEC_VECTOR_MODE (mode)
3c1eb9eb
JM
3619 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3620 || mode == DImode))))
f676971a 3621 {
9ebbca7d
GK
3622 HOST_WIDE_INT high_int, low_int;
3623 rtx sum;
a65c591c
DE
3624 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3625 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3626 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3627 GEN_INT (high_int)), 0);
3628 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3629 }
f676971a 3630 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3631 && GET_CODE (XEXP (x, 0)) == REG
3632 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3633 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
3634 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3635 || TARGET_POWERPC64
efc05e3c
PB
3636 || ((mode != DImode && mode != DFmode && mode != DDmode)
3637 || TARGET_E500_DOUBLE))
9ebbca7d 3638 && (TARGET_POWERPC64 || mode != DImode)
efc05e3c
PB
3639 && mode != TImode
3640 && mode != TFmode
3641 && mode != TDmode)
9ebbca7d
GK
3642 {
3643 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3644 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3645 }
0ac081f6
AH
3646 else if (ALTIVEC_VECTOR_MODE (mode))
3647 {
3648 rtx reg;
3649
3650 /* Make sure both operands are registers. */
3651 if (GET_CODE (x) == PLUS)
9f85ed45 3652 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3653 force_reg (Pmode, XEXP (x, 1)));
3654
3655 reg = force_reg (Pmode, x);
3656 return reg;
3657 }
4d4cbc0e 3658 else if (SPE_VECTOR_MODE (mode)
17caeff2 3659 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3660 || mode == DDmode || mode == TDmode
54b695e7 3661 || mode == DImode)))
a3170dc6 3662 {
54b695e7
AH
3663 if (mode == DImode)
3664 return NULL_RTX;
a3170dc6
AH
3665 /* We accept [reg + reg] and [reg + OFFSET]. */
3666
3667 if (GET_CODE (x) == PLUS)
61dd226f
NF
3668 {
3669 rtx op1 = XEXP (x, 0);
3670 rtx op2 = XEXP (x, 1);
3671 rtx y;
3672
3673 op1 = force_reg (Pmode, op1);
3674
3675 if (GET_CODE (op2) != REG
3676 && (GET_CODE (op2) != CONST_INT
3677 || !SPE_CONST_OFFSET_OK (INTVAL (op2))
3678 || (GET_MODE_SIZE (mode) > 8
3679 && !SPE_CONST_OFFSET_OK (INTVAL (op2) + 8))))
3680 op2 = force_reg (Pmode, op2);
3681
3682 /* We can't always do [reg + reg] for these, because [reg +
3683 reg + offset] is not a legitimate addressing mode. */
3684 y = gen_rtx_PLUS (Pmode, op1, op2);
3685
3686 if (GET_MODE_SIZE (mode) > 8 && REG_P (op2))
3687 return force_reg (Pmode, y);
3688 else
3689 return y;
3690 }
a3170dc6
AH
3691
3692 return force_reg (Pmode, x);
3693 }
f1384257
AM
3694 else if (TARGET_ELF
3695 && TARGET_32BIT
3696 && TARGET_NO_TOC
3697 && ! flag_pic
9ebbca7d 3698 && GET_CODE (x) != CONST_INT
f676971a 3699 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3700 && CONSTANT_P (x)
6ac7bf2c
GK
3701 && GET_MODE_NUNITS (mode) == 1
3702 && (GET_MODE_BITSIZE (mode) <= 32
4d4447b5
PB
3703 || ((TARGET_HARD_FLOAT && TARGET_FPRS)
3704 && (mode == DFmode || mode == DDmode))))
9ebbca7d
GK
3705 {
3706 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3707 emit_insn (gen_elf_high (reg, x));
3708 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3709 }
ee890fe2
SS
3710 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3711 && ! flag_pic
ab82a49f
AP
3712#if TARGET_MACHO
3713 && ! MACHO_DYNAMIC_NO_PIC_P
3714#endif
ee890fe2 3715 && GET_CODE (x) != CONST_INT
f676971a 3716 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3717 && CONSTANT_P (x)
4d4447b5
PB
3718 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3719 || (mode != DFmode && mode != DDmode))
f676971a 3720 && mode != DImode
ee890fe2
SS
3721 && mode != TImode)
3722 {
3723 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3724 emit_insn (gen_macho_high (reg, x));
3725 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3726 }
f676971a 3727 else if (TARGET_TOC
4d588c14 3728 && constant_pool_expr_p (x)
a9098fd0 3729 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3730 {
3731 return create_TOC_reference (x);
3732 }
3733 else
3734 return NULL_RTX;
3735}
258bfae2 3736
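/* Worked example (illustrative) of the HIGH/LO split performed in
   rs6000_legitimize_address above.  For reg + 0x12345:

     low  = ((0x2345 ^ 0x8000) - 0x8000) = 0x2345
     high = 0x12345 - 0x2345             = 0x10000

   giving (reg + 0x10000) + 0x2345, where the high part is added
   separately and the low part fits the 16-bit D field.  For reg + 0x18000
   the low part is negative:

     low  = ((0x8000 ^ 0x8000) - 0x8000) = -0x8000
     high = 0x18000 - (-0x8000)          = 0x20000.  */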
fdbe66f2 3737/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3738 We need to emit DTP-relative relocations. */
3739
fdbe66f2 3740static void
c973d557
JJ
3741rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3742{
3743 switch (size)
3744 {
3745 case 4:
3746 fputs ("\t.long\t", file);
3747 break;
3748 case 8:
3749 fputs (DOUBLE_INT_ASM_OP, file);
3750 break;
3751 default:
37409796 3752 gcc_unreachable ();
c973d557
JJ
3753 }
3754 output_addr_const (file, x);
3755 fputs ("@dtprel+0x8000", file);
3756}
3757
c4501e62
JJ
3758/* Construct the SYMBOL_REF for the tls_get_addr function. */
3759
3760static GTY(()) rtx rs6000_tls_symbol;
3761static rtx
863d938c 3762rs6000_tls_get_addr (void)
c4501e62
JJ
3763{
3764 if (!rs6000_tls_symbol)
3765 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3766
3767 return rs6000_tls_symbol;
3768}
3769
3770/* Construct the SYMBOL_REF for TLS GOT references. */
3771
3772static GTY(()) rtx rs6000_got_symbol;
3773static rtx
863d938c 3774rs6000_got_sym (void)
c4501e62
JJ
3775{
3776 if (!rs6000_got_symbol)
3777 {
3778 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3779 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3780 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3781 }
c4501e62
JJ
3782
3783 return rs6000_got_symbol;
3784}
3785
3786/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3787 this (thread-local) address. */
3788
3789static rtx
a2369ed3 3790rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3791{
3792 rtx dest, insn;
3793
3794 dest = gen_reg_rtx (Pmode);
3795 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3796 {
3797 rtx tlsreg;
3798
3799 if (TARGET_64BIT)
3800 {
3801 tlsreg = gen_rtx_REG (Pmode, 13);
3802 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3803 }
3804 else
3805 {
3806 tlsreg = gen_rtx_REG (Pmode, 2);
3807 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3808 }
3809 emit_insn (insn);
3810 }
3811 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3812 {
3813 rtx tlsreg, tmp;
3814
3815 tmp = gen_reg_rtx (Pmode);
3816 if (TARGET_64BIT)
3817 {
3818 tlsreg = gen_rtx_REG (Pmode, 13);
3819 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3820 }
3821 else
3822 {
3823 tlsreg = gen_rtx_REG (Pmode, 2);
3824 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3825 }
3826 emit_insn (insn);
3827 if (TARGET_64BIT)
3828 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3829 else
3830 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3831 emit_insn (insn);
3832 }
3833 else
3834 {
3835 rtx r3, got, tga, tmp1, tmp2, eqv;
3836
4fed8f8f
AM
3837 /* We currently use relocations like @got@tlsgd for tls, which
3838 means the linker will handle allocation of tls entries, placing
3839 them in the .got section. So use a pointer to the .got section,
3840 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3841 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3842 if (TARGET_64BIT)
972f427b 3843 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3844 else
3845 {
3846 if (flag_pic == 1)
3847 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3848 else
3849 {
3850 rtx gsym = rs6000_got_sym ();
3851 got = gen_reg_rtx (Pmode);
3852 if (flag_pic == 0)
3853 rs6000_emit_move (got, gsym, Pmode);
3854 else
3855 {
e65a3857 3856 rtx tmp3, mem;
c4501e62
JJ
3857 rtx first, last;
3858
c4501e62
JJ
3859 tmp1 = gen_reg_rtx (Pmode);
3860 tmp2 = gen_reg_rtx (Pmode);
3861 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3862 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3863
e65a3857
DE
3864 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
3865 emit_move_insn (tmp1,
1de43f85 3866 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
3867 emit_move_insn (tmp2, mem);
3868 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3869 last = emit_move_insn (got, tmp3);
bd94cb6e 3870 set_unique_reg_note (last, REG_EQUAL, gsym);
6fb5fa3c 3871 maybe_encapsulate_block (first, last, gsym);
c4501e62
JJ
3872 }
3873 }
3874 }
3875
3876 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3877 {
3878 r3 = gen_rtx_REG (Pmode, 3);
3879 if (TARGET_64BIT)
3880 insn = gen_tls_gd_64 (r3, got, addr);
3881 else
3882 insn = gen_tls_gd_32 (r3, got, addr);
3883 start_sequence ();
3884 emit_insn (insn);
3885 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3886 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3887 insn = emit_call_insn (insn);
3888 CONST_OR_PURE_CALL_P (insn) = 1;
3889 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3890 insn = get_insns ();
3891 end_sequence ();
3892 emit_libcall_block (insn, dest, r3, addr);
3893 }
3894 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3895 {
3896 r3 = gen_rtx_REG (Pmode, 3);
3897 if (TARGET_64BIT)
3898 insn = gen_tls_ld_64 (r3, got);
3899 else
3900 insn = gen_tls_ld_32 (r3, got);
3901 start_sequence ();
3902 emit_insn (insn);
3903 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3904 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3905 insn = emit_call_insn (insn);
3906 CONST_OR_PURE_CALL_P (insn) = 1;
3907 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3908 insn = get_insns ();
3909 end_sequence ();
3910 tmp1 = gen_reg_rtx (Pmode);
3911 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3912 UNSPEC_TLSLD);
3913 emit_libcall_block (insn, tmp1, r3, eqv);
3914 if (rs6000_tls_size == 16)
3915 {
3916 if (TARGET_64BIT)
3917 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3918 else
3919 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3920 }
3921 else if (rs6000_tls_size == 32)
3922 {
3923 tmp2 = gen_reg_rtx (Pmode);
3924 if (TARGET_64BIT)
3925 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3926 else
3927 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3928 emit_insn (insn);
3929 if (TARGET_64BIT)
3930 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3931 else
3932 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3933 }
3934 else
3935 {
3936 tmp2 = gen_reg_rtx (Pmode);
3937 if (TARGET_64BIT)
3938 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3939 else
3940 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3941 emit_insn (insn);
3942 insn = gen_rtx_SET (Pmode, dest,
3943 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3944 }
3945 emit_insn (insn);
3946 }
3947 else
3948 {
a7b376ee 3949 /* IE, or 64-bit offset LE. */
c4501e62
JJ
3950 tmp2 = gen_reg_rtx (Pmode);
3951 if (TARGET_64BIT)
3952 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3953 else
3954 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3955 emit_insn (insn);
3956 if (TARGET_64BIT)
3957 insn = gen_tls_tls_64 (dest, tmp2, addr);
3958 else
3959 insn = gen_tls_tls_32 (dest, tmp2, addr);
3960 emit_insn (insn);
3961 }
3962 }
3963
3964 return dest;
3965}
3966
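/* Illustrative expansion (the mnemonics are an assumption; the exact
   output comes from the .md patterns) of the local-exec cases handled
   above.  With rs6000_tls_size == 16 on a 64-bit target:

       addi  rD, r13, var@tprel

   and with rs6000_tls_size == 32:

       addis rT, r13, var@tprel@ha
       addi  rD, rT,  var@tprel@l

   On 32-bit targets r2 is used as the thread pointer instead of r13, as
   in the code above.  */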
c4501e62
JJ
3967/* Return 1 if X contains a thread-local symbol. */
3968
3969bool
a2369ed3 3970rs6000_tls_referenced_p (rtx x)
c4501e62 3971{
cd413cab
AP
3972 if (! TARGET_HAVE_TLS)
3973 return false;
3974
c4501e62
JJ
3975 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3976}
3977
3978/* Return 1 if *X is a thread-local symbol. This is the same as
3979 rs6000_tls_symbol_ref except for the type of the unused argument. */
3980
9390387d 3981static int
a2369ed3 3982rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
3983{
3984 return RS6000_SYMBOL_REF_TLS_P (*x);
3985}
3986
24ea750e
DJ
3987/* The convention appears to be to define this wherever it is used.
 3988 Since legitimize_reload_address is now defined here, REG_MODE_OK_FOR_BASE_P
 3989 is used here as well. */
3990#ifndef REG_MODE_OK_FOR_BASE_P
3991#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3992#endif
3993
3994/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3995 replace the input X, or the original X if no replacement is called for.
3996 The output parameter *WIN is 1 if the calling macro should goto WIN,
3997 0 if it should not.
3998
3999 For RS/6000, we wish to handle large displacements off a base
 4000 register by splitting the addend across an addis and the mem insn.
 4001 This cuts the number of extra insns needed from 3 to 1.
4002
4003 On Darwin, we use this to generate code for floating point constants.
4004 A movsf_low is generated so we wind up with 2 instructions rather than 3.
4005 The Darwin code is inside #if TARGET_MACHO because only then is
4006 machopic_function_base_name() defined. */
4007rtx
f676971a 4008rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
4009 int opnum, int type,
4010 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 4011{
f676971a 4012 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
4013 if (GET_CODE (x) == PLUS
4014 && GET_CODE (XEXP (x, 0)) == PLUS
4015 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4016 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4017 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4018 {
4019 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4020 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4021 opnum, (enum reload_type)type);
24ea750e
DJ
4022 *win = 1;
4023 return x;
4024 }
3deb2758 4025
24ea750e
DJ
4026#if TARGET_MACHO
4027 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
4028 && GET_CODE (x) == LO_SUM
4029 && GET_CODE (XEXP (x, 0)) == PLUS
4030 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
4031 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
4032 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
4033 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
4034 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
4035 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
4036 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
4037 {
4038 /* Result of previous invocation of this function on Darwin
6f317ef3 4039 floating point constant. */
24ea750e 4040 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4041 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4042 opnum, (enum reload_type)type);
24ea750e
DJ
4043 *win = 1;
4044 return x;
4045 }
4046#endif
4937d02d
DE
4047
 4048 /* Force a non-word-aligned ld/std offset into the base register by
 4049 wrapping it in an offset of 0. */
4050 if (GET_CODE (x) == PLUS
4051 && GET_CODE (XEXP (x, 0)) == REG
4052 && REGNO (XEXP (x, 0)) < 32
4053 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
4054 && GET_CODE (XEXP (x, 1)) == CONST_INT
4055 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 4056 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
4057 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
4058 && TARGET_POWERPC64)
4059 {
4060 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
4061 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4062 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4063 opnum, (enum reload_type) type);
4064 *win = 1;
4065 return x;
4066 }
4067
24ea750e
DJ
4068 if (GET_CODE (x) == PLUS
4069 && GET_CODE (XEXP (x, 0)) == REG
4070 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
4071 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 4072 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 4073 && !SPE_VECTOR_MODE (mode)
17caeff2 4074 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 4075 || mode == DDmode || mode == TDmode
54b695e7 4076 || mode == DImode))
78c875e8 4077 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
4078 {
4079 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
4080 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
4081 HOST_WIDE_INT high
c4ad648e 4082 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
4083
4084 /* Check for 32-bit overflow. */
4085 if (high + low != val)
c4ad648e 4086 {
24ea750e
DJ
4087 *win = 0;
4088 return x;
4089 }
4090
4091 /* Reload the high part into a base reg; leave the low part
c4ad648e 4092 in the mem directly. */
24ea750e
DJ
4093
4094 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
4095 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4096 GEN_INT (high)),
4097 GEN_INT (low));
24ea750e
DJ
4098
4099 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4100 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4101 opnum, (enum reload_type)type);
24ea750e
DJ
4102 *win = 1;
4103 return x;
4104 }
4937d02d 4105
24ea750e 4106 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 4107 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 4108 && !SPE_VECTOR_MODE (mode)
8308679f
DE
4109#if TARGET_MACHO
4110 && DEFAULT_ABI == ABI_DARWIN
a29077da 4111 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
4112#else
4113 && DEFAULT_ABI == ABI_V4
4114 && !flag_pic
4115#endif
7393f7f8 4116 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
4d4447b5 4117 The same goes for DImode without 64-bit gprs and DFmode and DDmode
7b5d92b2 4118 without fprs. */
0d8c1c97 4119 && mode != TFmode
7393f7f8 4120 && mode != TDmode
7b5d92b2 4121 && (mode != DImode || TARGET_POWERPC64)
4d4447b5 4122 && ((mode != DFmode && mode != DDmode) || TARGET_POWERPC64
7b5d92b2 4123 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
24ea750e 4124 {
8308679f 4125#if TARGET_MACHO
a29077da
GK
4126 if (flag_pic)
4127 {
4128 rtx offset = gen_rtx_CONST (Pmode,
4129 gen_rtx_MINUS (Pmode, x,
11abc112 4130 machopic_function_base_sym ()));
a29077da
GK
4131 x = gen_rtx_LO_SUM (GET_MODE (x),
4132 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4133 gen_rtx_HIGH (Pmode, offset)), offset);
4134 }
4135 else
8308679f 4136#endif
a29077da 4137 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 4138 gen_rtx_HIGH (Pmode, x), x);
a29077da 4139
24ea750e 4140 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
4141 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4142 opnum, (enum reload_type)type);
24ea750e
DJ
4143 *win = 1;
4144 return x;
4145 }
4937d02d 4146
dec1f3aa
DE
4147 /* Reload an offset address wrapped by an AND that represents the
4148 masking of the lower bits. Strip the outer AND and let reload
4149 convert the offset address into an indirect address. */
4150 if (TARGET_ALTIVEC
4151 && ALTIVEC_VECTOR_MODE (mode)
4152 && GET_CODE (x) == AND
4153 && GET_CODE (XEXP (x, 0)) == PLUS
4154 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4155 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4156 && GET_CODE (XEXP (x, 1)) == CONST_INT
4157 && INTVAL (XEXP (x, 1)) == -16)
4158 {
4159 x = XEXP (x, 0);
4160 *win = 1;
4161 return x;
4162 }
4163
24ea750e 4164 if (TARGET_TOC
4d588c14 4165 && constant_pool_expr_p (x)
c1f11548 4166 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 4167 {
194c524a 4168 x = create_TOC_reference (x);
24ea750e
DJ
4169 *win = 1;
4170 return x;
4171 }
4172 *win = 0;
4173 return x;
f676971a 4174}
24ea750e 4175
258bfae2
FS
4176/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
4177 that is a valid memory address for an instruction.
4178 The MODE argument is the machine mode for the MEM expression
4179 that wants to use this address.
4180
 4181 On the RS/6000, there are four valid address forms: a SYMBOL_REF that
4182 refers to a constant pool entry of an address (or the sum of it
4183 plus a constant), a short (16-bit signed) constant plus a register,
4184 the sum of two registers, or a register indirect, possibly with an
4d4447b5
PB
4185 auto-increment. For DFmode, DDmode and DImode with a constant plus
4186 register, we must ensure that both words are addressable or PowerPC64
4187 with offset word aligned.
258bfae2 4188
4d4447b5 4189 For modes spanning multiple registers (DFmode and DDmode in 32-bit GPRs,
7393f7f8
BE
4190 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4191 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
4192 during assembly output. */
4193int
a2369ed3 4194rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 4195{
850e8d3d
DN
4196 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4197 if (TARGET_ALTIVEC
4198 && ALTIVEC_VECTOR_MODE (mode)
4199 && GET_CODE (x) == AND
4200 && GET_CODE (XEXP (x, 1)) == CONST_INT
4201 && INTVAL (XEXP (x, 1)) == -16)
4202 x = XEXP (x, 0);
4203
c4501e62
JJ
4204 if (RS6000_SYMBOL_REF_TLS_P (x))
4205 return 0;
4d588c14 4206 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
4207 return 1;
4208 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4209 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4210 && !SPE_VECTOR_MODE (mode)
429ec7dc 4211 && mode != TFmode
7393f7f8 4212 && mode != TDmode
54b695e7 4213 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4214 && !(TARGET_E500_DOUBLE
4215 && (mode == DFmode || mode == DDmode || mode == DImode))
258bfae2 4216 && TARGET_UPDATE
4d588c14 4217 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4218 return 1;
d04b6e6e 4219 if (legitimate_small_data_p (mode, x))
258bfae2 4220 return 1;
4d588c14 4221 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
4222 return 1;
4223 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4224 if (! reg_ok_strict
4225 && GET_CODE (x) == PLUS
4226 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4227 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4228 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
4229 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4230 return 1;
76d2b81d 4231 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4232 return 1;
4233 if (mode != TImode
76d2b81d 4234 && mode != TFmode
7393f7f8 4235 && mode != TDmode
a3170dc6
AH
4236 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4237 || TARGET_POWERPC64
4d4447b5 4238 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
258bfae2 4239 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 4240 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4241 return 1;
6fb5fa3c
DB
4242 if (GET_CODE (x) == PRE_MODIFY
4243 && mode != TImode
4244 && mode != TFmode
4245 && mode != TDmode
4246 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4247 || TARGET_POWERPC64
4d4447b5 4248 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
6fb5fa3c
DB
4249 && (TARGET_POWERPC64 || mode != DImode)
4250 && !ALTIVEC_VECTOR_MODE (mode)
4251 && !SPE_VECTOR_MODE (mode)
4252 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4253 && !(TARGET_E500_DOUBLE
4254 && (mode == DFmode || mode == DDmode || mode == DImode))
6fb5fa3c
DB
4255 && TARGET_UPDATE
4256 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4257 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
4258 || legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
4259 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4260 return 1;
4d588c14 4261 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4262 return 1;
4263 return 0;
4264}
4d588c14
RH
4265
4266/* Go to LABEL if ADDR (a legitimate address expression)
4267 has an effect that depends on the machine mode it is used for.
4268
4269 On the RS/6000 this is true of all integral offsets (since AltiVec
4270 modes don't allow them) or is a pre-increment or decrement.
4271
4272 ??? Except that due to conceptual problems in offsettable_address_p
4273 we can't really report the problems of integral offsets. So leave
f676971a 4274 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4275 sub-words of a TFmode operand, which is what we had before. */
4276
4277bool
a2369ed3 4278rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4279{
4280 switch (GET_CODE (addr))
4281 {
4282 case PLUS:
4283 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4284 {
4285 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4286 return val + 12 + 0x8000 >= 0x10000;
4287 }
4288 break;
4289
4290 case LO_SUM:
4291 return true;
4292
6fb5fa3c
DB
4293 case PRE_INC:
4294 case PRE_DEC:
4295 case PRE_MODIFY:
4296 return TARGET_UPDATE;
4d588c14
RH
4297
4298 default:
4299 break;
4300 }
4301
4302 return false;
4303}
d8ecbcdb 4304
d04b6e6e
EB
4305/* More elaborate version of recog's offsettable_memref_p predicate
4306 that works around the ??? note of rs6000_mode_dependent_address.
4307 In particular it accepts
4308
4309 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4310
4311 in 32-bit mode, that the recog predicate rejects. */
4312
4313bool
4314rs6000_offsettable_memref_p (rtx op)
4315{
4316 if (!MEM_P (op))
4317 return false;
4318
4319 /* First mimic offsettable_memref_p. */
4320 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4321 return true;
4322
4323 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4324 the latter predicate knows nothing about the mode of the memory
4325 reference and, therefore, assumes that it is the largest supported
4326 mode (TFmode). As a consequence, legitimate offsettable memory
4327 references are rejected. rs6000_legitimate_offset_address_p contains
4328 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4329 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4330}
4331
d8ecbcdb
AH
4332/* Return number of consecutive hard regs needed starting at reg REGNO
4333 to hold something of mode MODE.
4334 This is ordinarily the length in words of a value of mode MODE
4335 but can be less for certain modes in special long registers.
4336
4337 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4338 scalar instructions. The upper 32 bits are only available to the
4339 SIMD instructions.
4340
4341 POWER and PowerPC GPRs hold 32 bits worth;
 4342 PowerPC64 GPRs and FPRs hold 64 bits worth. */
4343
4344int
4345rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4346{
4347 if (FP_REGNO_P (regno))
4348 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4349
4350 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4351 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4352
4353 if (ALTIVEC_REGNO_P (regno))
4354 return
4355 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4356
8521c414
JM
4357 /* The value returned for SCmode in the E500 double case is 2 for
4358 ABI compatibility; storing an SCmode value in a single register
4359 would require function_arg and rs6000_spe_function_arg to handle
4360 SCmode so as to pass the value correctly in a pair of
4361 registers. */
4362 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
4363 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4364
d8ecbcdb
AH
4365 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4366}
2aa4498c
AH
4367
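/* For example (illustrative): DFmode needs two 32-bit GPRs but only one
   FPR; an SPE V2SImode vector fits a single 64-bit GPR; and V4SImode
   occupies one 128-bit AltiVec register.  */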
4368/* Change register usage conditional on target flags. */
4369void
4370rs6000_conditional_register_usage (void)
4371{
4372 int i;
4373
4374 /* Set MQ register fixed (already call_used) if not POWER
4375 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4376 be allocated. */
4377 if (! TARGET_POWER)
4378 fixed_regs[64] = 1;
4379
7c9ac5c0 4380 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4381 if (TARGET_64BIT)
4382 fixed_regs[13] = call_used_regs[13]
4383 = call_really_used_regs[13] = 1;
4384
4385 /* Conditionally disable FPRs. */
4386 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4387 for (i = 32; i < 64; i++)
4388 fixed_regs[i] = call_used_regs[i]
c4ad648e 4389 = call_really_used_regs[i] = 1;
2aa4498c 4390
7c9ac5c0
PH
4391 /* The TOC register is not killed across calls in a way that is
4392 visible to the compiler. */
4393 if (DEFAULT_ABI == ABI_AIX)
4394 call_really_used_regs[2] = 0;
4395
2aa4498c
AH
4396 if (DEFAULT_ABI == ABI_V4
4397 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4398 && flag_pic == 2)
4399 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4400
4401 if (DEFAULT_ABI == ABI_V4
4402 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4403 && flag_pic == 1)
4404 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4405 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4406 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4407
4408 if (DEFAULT_ABI == ABI_DARWIN
4409 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4410 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4411 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4412 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4413
b4db40bf
JJ
4414 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4415 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4416 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4417
2aa4498c
AH
4418 if (TARGET_SPE)
4419 {
4420 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4421 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4422 registers in prologues and epilogues. We no longer use r14
4423 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4424 pool for link-compatibility with older versions of GCC. Once
4425 "old" code has died out, we can return r14 to the allocation
4426 pool. */
4427 fixed_regs[14]
4428 = call_used_regs[14]
4429 = call_really_used_regs[14] = 1;
2aa4498c
AH
4430 }
4431
0db747be 4432 if (!TARGET_ALTIVEC)
2aa4498c
AH
4433 {
4434 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4435 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4436 call_really_used_regs[VRSAVE_REGNO] = 1;
4437 }
4438
0db747be
DE
4439 if (TARGET_ALTIVEC)
4440 global_regs[VSCR_REGNO] = 1;
4441
2aa4498c 4442 if (TARGET_ALTIVEC_ABI)
0db747be
DE
4443 {
4444 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4445 call_used_regs[i] = call_really_used_regs[i] = 1;
4446
4447 /* AIX reserves VR20:31 in non-extended ABI mode. */
4448 if (TARGET_XCOFF)
4449 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
4450 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4451 }
2aa4498c 4452}
fb4d4348 4453\f
a4f6c312
SS
4454/* Try to output insns to set TARGET equal to the constant C if it can
4455 be done in less than N insns. Do all computations in MODE.
4456 Returns the place where the output has been placed if it can be
4457 done and the insns have been emitted. If it would take more than N
 4458 insns, zero is returned and no insns are emitted. */
2bfcf297
DB
4459
4460rtx
f676971a 4461rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4462 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4463{
af8cb5c5 4464 rtx result, insn, set;
2bfcf297
DB
4465 HOST_WIDE_INT c0, c1;
4466
37409796 4467 switch (mode)
2bfcf297 4468 {
37409796
NS
4469 case QImode:
4470 case HImode:
2bfcf297 4471 if (dest == NULL)
c4ad648e 4472 dest = gen_reg_rtx (mode);
2bfcf297
DB
4473 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4474 return dest;
bb8df8a6 4475
37409796 4476 case SImode:
b3a13419 4477 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4478
d448860e 4479 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4480 GEN_INT (INTVAL (source)
4481 & (~ (HOST_WIDE_INT) 0xffff))));
4482 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4483 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4484 GEN_INT (INTVAL (source) & 0xffff))));
4485 result = dest;
37409796
NS
4486 break;
4487
4488 case DImode:
4489 switch (GET_CODE (source))
af8cb5c5 4490 {
37409796 4491 case CONST_INT:
af8cb5c5
DE
4492 c0 = INTVAL (source);
4493 c1 = -(c0 < 0);
37409796 4494 break;
bb8df8a6 4495
37409796 4496 case CONST_DOUBLE:
2bfcf297 4497#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4498 c0 = CONST_DOUBLE_LOW (source);
4499 c1 = -(c0 < 0);
2bfcf297 4500#else
af8cb5c5
DE
4501 c0 = CONST_DOUBLE_LOW (source);
4502 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4503#endif
37409796
NS
4504 break;
4505
4506 default:
4507 gcc_unreachable ();
af8cb5c5 4508 }
af8cb5c5
DE
4509
4510 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4511 break;
4512
4513 default:
4514 gcc_unreachable ();
2bfcf297 4515 }
2bfcf297 4516
af8cb5c5
DE
4517 insn = get_last_insn ();
4518 set = single_set (insn);
4519 if (! CONSTANT_P (SET_SRC (set)))
4520 set_unique_reg_note (insn, REG_EQUAL, source);
4521
4522 return result;
2bfcf297
DB
4523}
4524
4525/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
 4526 fall back to a straightforward decomposition. We do this to avoid
4527 exponential run times encountered when looking for longer sequences
4528 with rs6000_emit_set_const. */
4529static rtx
a2369ed3 4530rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4531{
4532 if (!TARGET_POWERPC64)
4533 {
4534 rtx operand1, operand2;
4535
4536 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4537 DImode);
d448860e 4538 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4539 DImode);
4540 emit_move_insn (operand1, GEN_INT (c1));
4541 emit_move_insn (operand2, GEN_INT (c2));
4542 }
4543 else
4544 {
bc06712d 4545 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4546
bc06712d 4547 ud1 = c1 & 0xffff;
f921c9c9 4548 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4549#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4550 c2 = c1 >> 32;
2bfcf297 4551#endif
bc06712d 4552 ud3 = c2 & 0xffff;
f921c9c9 4553 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4554
f676971a 4555 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4556 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4557 {
bc06712d 4558 if (ud1 & 0x8000)
b78d48dd 4559 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4560 else
4561 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4562 }
2bfcf297 4563
f676971a 4564 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4565 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4566 {
bc06712d 4567 if (ud2 & 0x8000)
f676971a 4568 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4569 - 0x80000000));
252b88f7 4570 else
bc06712d
TR
4571 emit_move_insn (dest, GEN_INT (ud2 << 16));
4572 if (ud1 != 0)
d448860e
JH
4573 emit_move_insn (copy_rtx (dest),
4574 gen_rtx_IOR (DImode, copy_rtx (dest),
4575 GEN_INT (ud1)));
252b88f7 4576 }
f676971a 4577 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4578 || (ud4 == 0 && ! (ud3 & 0x8000)))
4579 {
4580 if (ud3 & 0x8000)
f676971a 4581 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4582 - 0x80000000));
4583 else
4584 emit_move_insn (dest, GEN_INT (ud3 << 16));
4585
4586 if (ud2 != 0)
d448860e
JH
4587 emit_move_insn (copy_rtx (dest),
4588 gen_rtx_IOR (DImode, copy_rtx (dest),
4589 GEN_INT (ud2)));
4590 emit_move_insn (copy_rtx (dest),
4591 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4592 GEN_INT (16)));
bc06712d 4593 if (ud1 != 0)
d448860e
JH
4594 emit_move_insn (copy_rtx (dest),
4595 gen_rtx_IOR (DImode, copy_rtx (dest),
4596 GEN_INT (ud1)));
bc06712d 4597 }
f676971a 4598 else
bc06712d
TR
4599 {
4600 if (ud4 & 0x8000)
f676971a 4601 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4602 - 0x80000000));
4603 else
4604 emit_move_insn (dest, GEN_INT (ud4 << 16));
4605
4606 if (ud3 != 0)
d448860e
JH
4607 emit_move_insn (copy_rtx (dest),
4608 gen_rtx_IOR (DImode, copy_rtx (dest),
4609 GEN_INT (ud3)));
2bfcf297 4610
d448860e
JH
4611 emit_move_insn (copy_rtx (dest),
4612 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4613 GEN_INT (32)));
bc06712d 4614 if (ud2 != 0)
d448860e
JH
4615 emit_move_insn (copy_rtx (dest),
4616 gen_rtx_IOR (DImode, copy_rtx (dest),
4617 GEN_INT (ud2 << 16)));
bc06712d 4618 if (ud1 != 0)
d448860e
JH
4619 emit_move_insn (copy_rtx (dest),
4620 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4621 }
4622 }
2bfcf297
DB
4623 return dest;
4624}
4625
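/* Worked example (illustrative) of the 64-bit decomposition above for
   c = 0x123456789ABCDEF0, where ud4 = 0x1234, ud3 = 0x5678, ud2 = 0x9ABC
   and ud1 = 0xDEF0.  None of the sign-extension shortcuts apply, so the
   emitted sequence is roughly

       lis   rD, 0x1234         # rD = 0x0000000012340000
       ori   rD, rD, 0x5678     # rD = 0x0000000012345678
       sldi  rD, rD, 32         # rD = 0x1234567800000000
       oris  rD, rD, 0x9ABC     # rD = 0x123456789ABC0000
       ori   rD, rD, 0xDEF0     # rD = 0x123456789ABCDEF0  */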
76d2b81d 4626/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4627 in cases where it won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4628
4629static void
4630rs6000_eliminate_indexed_memrefs (rtx operands[2])
4631{
4632 if (GET_CODE (operands[0]) == MEM
4633 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4634 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4635 && ! reload_in_progress)
4636 operands[0]
4637 = replace_equiv_address (operands[0],
4638 copy_addr_to_reg (XEXP (operands[0], 0)));
4639
4640 if (GET_CODE (operands[1]) == MEM
4641 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4642 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4643 && ! reload_in_progress)
4644 operands[1]
4645 = replace_equiv_address (operands[1],
4646 copy_addr_to_reg (XEXP (operands[1], 0)));
4647}
4648
fb4d4348
GK
4649/* Emit a move from SOURCE to DEST in mode MODE. */
4650void
a2369ed3 4651rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4652{
4653 rtx operands[2];
4654 operands[0] = dest;
4655 operands[1] = source;
f676971a 4656
fb4d4348
GK
4657 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4658 if (GET_CODE (operands[1]) == CONST_DOUBLE
4659 && ! FLOAT_MODE_P (mode)
4660 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4661 {
4662 /* FIXME. This should never happen. */
4663 /* Since it seems that it does, do the safe thing and convert
4664 to a CONST_INT. */
2496c7bd 4665 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4666 }
37409796
NS
4667 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4668 || FLOAT_MODE_P (mode)
4669 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4670 || CONST_DOUBLE_LOW (operands[1]) < 0)
4671 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4672 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4673
c9e8cb32
DD
4674 /* Check if GCC is setting up a block move that will end up using FP
4675 registers as temporaries. We must make sure this is acceptable. */
4676 if (GET_CODE (operands[0]) == MEM
4677 && GET_CODE (operands[1]) == MEM
4678 && mode == DImode
41543739
GK
4679 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4680 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4681 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4682 ? 32 : MEM_ALIGN (operands[0])))
4683 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4684 ? 32
41543739
GK
4685 : MEM_ALIGN (operands[1]))))
4686 && ! MEM_VOLATILE_P (operands [0])
4687 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4688 {
41543739
GK
4689 emit_move_insn (adjust_address (operands[0], SImode, 0),
4690 adjust_address (operands[1], SImode, 0));
d448860e
JH
4691 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4692 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4693 return;
4694 }
630d42a0 4695
b3a13419 4696 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4697 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4698 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4699
a3170dc6
AH
4700 if (mode == SFmode && ! TARGET_POWERPC
4701 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 4702 && GET_CODE (operands[0]) == MEM)
fb4d4348 4703 {
ffc14f31
GK
4704 int regnum;
4705
4706 if (reload_in_progress || reload_completed)
4707 regnum = true_regnum (operands[1]);
4708 else if (GET_CODE (operands[1]) == REG)
4709 regnum = REGNO (operands[1]);
4710 else
4711 regnum = -1;
f676971a 4712
fb4d4348
GK
4713 /* If operands[1] is a register, on POWER it may have
4714 double-precision data in it, so truncate it to single
4715 precision. */
4716 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4717 {
4718 rtx newreg;
b3a13419 4719 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4720 : gen_reg_rtx (mode));
fb4d4348
GK
4721 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4722 operands[1] = newreg;
4723 }
4724 }
4725
c4501e62
JJ
4726 /* Recognize the case where operand[1] is a reference to thread-local
4727 data and load its address to a register. */
84f52ebd 4728 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4729 {
84f52ebd
RH
4730 enum tls_model model;
4731 rtx tmp = operands[1];
4732 rtx addend = NULL;
4733
4734 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4735 {
4736 addend = XEXP (XEXP (tmp, 0), 1);
4737 tmp = XEXP (XEXP (tmp, 0), 0);
4738 }
4739
4740 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4741 model = SYMBOL_REF_TLS_MODEL (tmp);
4742 gcc_assert (model != 0);
4743
4744 tmp = rs6000_legitimize_tls_address (tmp, model);
4745 if (addend)
4746 {
4747 tmp = gen_rtx_PLUS (mode, tmp, addend);
4748 tmp = force_operand (tmp, operands[0]);
4749 }
4750 operands[1] = tmp;
c4501e62
JJ
4751 }
4752
8f4e6caf
RH
4753 /* Handle the case where reload calls us with an invalid address. */
4754 if (reload_in_progress && mode == Pmode
69ef87e2 4755 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4756 || ! nonimmediate_operand (operands[0], mode)))
4757 goto emit_set;
4758
a9baceb1
GK
4759 /* 128-bit constant floating-point values on Darwin should really be
4760 loaded as two parts. */
8521c414 4761 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
4762 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4763 {
4764 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4765 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
4766 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
4767 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
4768 simplify_gen_subreg (imode, operands[1], mode, 0),
4769 imode);
4770 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
4771 GET_MODE_SIZE (imode)),
4772 simplify_gen_subreg (imode, operands[1], mode,
4773 GET_MODE_SIZE (imode)),
4774 imode);
a9baceb1
GK
4775 return;
4776 }
4777
e41b2a33
PB
4778 if (reload_in_progress && cfun->machine->sdmode_stack_slot != NULL_RTX)
4779 cfun->machine->sdmode_stack_slot =
4780 eliminate_regs (cfun->machine->sdmode_stack_slot, VOIDmode, NULL_RTX);
4781
4782 if (reload_in_progress
4783 && mode == SDmode
4784 && MEM_P (operands[0])
4785 && rtx_equal_p (operands[0], cfun->machine->sdmode_stack_slot)
4786 && REG_P (operands[1]))
4787 {
4788 if (FP_REGNO_P (REGNO (operands[1])))
4789 {
4790 rtx mem = adjust_address_nv (operands[0], DDmode, 0);
4791 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4792 emit_insn (gen_movsd_store (mem, operands[1]));
4793 }
4794 else if (INT_REGNO_P (REGNO (operands[1])))
4795 {
4796 rtx mem = adjust_address_nv (operands[0], mode, 4);
4797 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4798 emit_insn (gen_movsd_hardfloat (mem, operands[1]));
4799 }
4800 else
4801 gcc_unreachable ();
4802 return;
4803 }
4804 if (reload_in_progress
4805 && mode == SDmode
4806 && REG_P (operands[0])
4807 && MEM_P (operands[1])
4808 && rtx_equal_p (operands[1], cfun->machine->sdmode_stack_slot))
4809 {
4810 if (FP_REGNO_P (REGNO (operands[0])))
4811 {
4812 rtx mem = adjust_address_nv (operands[1], DDmode, 0);
4813 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4814 emit_insn (gen_movsd_load (operands[0], mem));
4815 }
4816 else if (INT_REGNO_P (REGNO (operands[0])))
4817 {
4818 rtx mem = adjust_address_nv (operands[1], mode, 4);
4819 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4820 emit_insn (gen_movsd_hardfloat (operands[0], mem));
4821 }
4822 else
4823 gcc_unreachable ();
4824 return;
4825 }
4826
fb4d4348
GK
4827 /* FIXME: In the long term, this switch statement should go away
4828 and be replaced by a sequence of tests based on things like
4829 mode == Pmode. */
4830 switch (mode)
4831 {
4832 case HImode:
4833 case QImode:
4834 if (CONSTANT_P (operands[1])
4835 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 4836 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
4837 break;
4838
06f4e019 4839 case TFmode:
7393f7f8 4840 case TDmode:
76d2b81d
DJ
4841 rs6000_eliminate_indexed_memrefs (operands);
4842 /* fall through */
4843
fb4d4348 4844 case DFmode:
7393f7f8 4845 case DDmode:
fb4d4348 4846 case SFmode:
e41b2a33 4847 case SDmode:
f676971a 4848 if (CONSTANT_P (operands[1])
fb4d4348 4849 && ! easy_fp_constant (operands[1], mode))
a9098fd0 4850 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4851 break;
f676971a 4852
0ac081f6
AH
4853 case V16QImode:
4854 case V8HImode:
4855 case V4SFmode:
4856 case V4SImode:
a3170dc6
AH
4857 case V4HImode:
4858 case V2SFmode:
4859 case V2SImode:
00a892b8 4860 case V1DImode:
69ef87e2 4861 if (CONSTANT_P (operands[1])
d744e06e 4862 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
4863 operands[1] = force_const_mem (mode, operands[1]);
4864 break;
f676971a 4865
fb4d4348 4866 case SImode:
a9098fd0 4867 case DImode:
fb4d4348
GK
4868 /* Use the default pattern for the address of ELF small data. */
4869 if (TARGET_ELF
a9098fd0 4870 && mode == Pmode
f607bc57 4871 && DEFAULT_ABI == ABI_V4
f676971a 4872 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
4873 || GET_CODE (operands[1]) == CONST)
4874 && small_data_operand (operands[1], mode))
fb4d4348
GK
4875 {
4876 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4877 return;
4878 }
4879
f607bc57 4880 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
4881 && mode == Pmode && mode == SImode
4882 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
4883 {
4884 emit_insn (gen_movsi_got (operands[0], operands[1]));
4885 return;
4886 }
4887
ee890fe2 4888 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
4889 && TARGET_NO_TOC
4890 && ! flag_pic
a9098fd0 4891 && mode == Pmode
fb4d4348
GK
4892 && CONSTANT_P (operands[1])
4893 && GET_CODE (operands[1]) != HIGH
4894 && GET_CODE (operands[1]) != CONST_INT)
4895 {
b3a13419
ILT
4896 rtx target = (!can_create_pseudo_p ()
4897 ? operands[0]
4898 : gen_reg_rtx (mode));
fb4d4348
GK
4899
4900 /* If this is a function address on -mcall-aixdesc,
4901 convert it to the address of the descriptor. */
4902 if (DEFAULT_ABI == ABI_AIX
4903 && GET_CODE (operands[1]) == SYMBOL_REF
4904 && XSTR (operands[1], 0)[0] == '.')
4905 {
4906 const char *name = XSTR (operands[1], 0);
4907 rtx new_ref;
4908 while (*name == '.')
4909 name++;
4910 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4911 CONSTANT_POOL_ADDRESS_P (new_ref)
4912 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 4913 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 4914 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 4915 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
4916 operands[1] = new_ref;
4917 }
7509c759 4918
ee890fe2
SS
4919 if (DEFAULT_ABI == ABI_DARWIN)
4920 {
ab82a49f
AP
4921#if TARGET_MACHO
4922 if (MACHO_DYNAMIC_NO_PIC_P)
4923 {
4924 /* Take care of any required data indirection. */
4925 operands[1] = rs6000_machopic_legitimize_pic_address (
4926 operands[1], mode, operands[0]);
4927 if (operands[0] != operands[1])
4928 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 4929 operands[0], operands[1]));
ab82a49f
AP
4930 return;
4931 }
4932#endif
b8a55285
AP
4933 emit_insn (gen_macho_high (target, operands[1]));
4934 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
4935 return;
4936 }
4937
fb4d4348
GK
4938 emit_insn (gen_elf_high (target, operands[1]));
4939 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4940 return;
4941 }
4942
a9098fd0
GK
4943 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4944 and we have put it in the TOC, we just need to make a TOC-relative
4945 reference to it. */
4946 if (TARGET_TOC
4947 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 4948 && constant_pool_expr_p (operands[1])
a9098fd0
GK
4949 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4950 get_pool_mode (operands[1])))
fb4d4348 4951 {
a9098fd0 4952 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 4953 }
a9098fd0
GK
4954 else if (mode == Pmode
4955 && CONSTANT_P (operands[1])
38886f37
AO
4956 && ((GET_CODE (operands[1]) != CONST_INT
4957 && ! easy_fp_constant (operands[1], mode))
4958 || (GET_CODE (operands[1]) == CONST_INT
4959 && num_insns_constant (operands[1], mode) > 2)
4960 || (GET_CODE (operands[0]) == REG
4961 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 4962 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
4963 && ! legitimate_constant_pool_address_p (operands[1])
4964 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
4965 {
4966 /* Emit a USE operation so that the constant isn't deleted if
4967 expensive optimizations are turned on because nobody
4968 references it. This should only be done for operands that
4969 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4970 This should not be done for operands that contain LABEL_REFs.
4971 For now, we just handle the obvious case. */
4972 if (GET_CODE (operands[1]) != LABEL_REF)
4973 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4974
c859cda6 4975#if TARGET_MACHO
ee890fe2 4976 /* Darwin uses a special PIC legitimizer. */
ab82a49f 4977 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 4978 {
ee890fe2
SS
4979 operands[1] =
4980 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
4981 operands[0]);
4982 if (operands[0] != operands[1])
4983 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
4984 return;
4985 }
c859cda6 4986#endif
ee890fe2 4987
fb4d4348
GK
4988 /* If we are to limit the number of things we put in the TOC and
4989 this is a symbol plus a constant we can add in one insn,
4990 just put the symbol in the TOC and add the constant. Don't do
4991 this if reload is in progress. */
4992 if (GET_CODE (operands[1]) == CONST
4993 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4994 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 4995 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
4996 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4997 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4998 && ! side_effects_p (operands[0]))
4999 {
a4f6c312
SS
5000 rtx sym =
5001 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
5002 rtx other = XEXP (XEXP (operands[1], 0), 1);
5003
a9098fd0
GK
5004 sym = force_reg (mode, sym);
5005 if (mode == SImode)
5006 emit_insn (gen_addsi3 (operands[0], sym, other));
5007 else
5008 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
5009 return;
5010 }
5011
a9098fd0 5012 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5013
f676971a 5014 if (TARGET_TOC
4d588c14 5015 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
5016 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
5017 get_pool_constant (XEXP (operands[1], 0)),
5018 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 5019 {
ba4828e0 5020 operands[1]
542a8afa 5021 = gen_const_mem (mode,
c4ad648e 5022 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 5023 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 5024 }
fb4d4348
GK
5025 }
5026 break;
a9098fd0 5027
fb4d4348 5028 case TImode:
76d2b81d
DJ
5029 rs6000_eliminate_indexed_memrefs (operands);
5030
27dc0551
DE
5031 if (TARGET_POWER)
5032 {
5033 emit_insn (gen_rtx_PARALLEL (VOIDmode,
5034 gen_rtvec (2,
5035 gen_rtx_SET (VOIDmode,
5036 operands[0], operands[1]),
5037 gen_rtx_CLOBBER (VOIDmode,
5038 gen_rtx_SCRATCH (SImode)))));
5039 return;
5040 }
fb4d4348
GK
5041 break;
5042
5043 default:
37409796 5044 gcc_unreachable ();
fb4d4348
GK
5045 }
5046
a9098fd0
GK
5047 /* Above, we may have called force_const_mem which may have returned
5048 an invalid address. If we can, fix this up; otherwise, reload will
5049 have to deal with it. */
8f4e6caf
RH
5050 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
5051 operands[1] = validize_mem (operands[1]);
a9098fd0 5052
8f4e6caf 5053 emit_set:
fb4d4348
GK
5054 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5055}
4697a36c 5056\f
2858f73a
GK
5057/* Nonzero if we can use a floating-point register to pass this arg. */
5058#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 5059 (SCALAR_FLOAT_MODE_P (MODE) \
2858f73a
GK
5060 && (CUM)->fregno <= FP_ARG_MAX_REG \
5061 && TARGET_HARD_FLOAT && TARGET_FPRS)
5062
5063/* Nonzero if we can use an AltiVec register to pass this arg. */
5064#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
5065 (ALTIVEC_VECTOR_MODE (MODE) \
5066 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
5067 && TARGET_ALTIVEC_ABI \
83953138 5068 && (NAMED))
2858f73a 5069
c6e8c921
GK
5070/* Return a nonzero value to say to return the function value in
5071 memory, just as large structures are always returned. TYPE will be
5072 the data type of the value, and FNTYPE will be the type of the
5073 function doing the returning, or @code{NULL} for libcalls.
5074
5075 The AIX ABI for the RS/6000 specifies that all structures are
5076 returned in memory. The Darwin ABI does the same. The SVR4 ABI
5077 specifies that structures <= 8 bytes are returned in r3/r4, but a
5078 draft put them in memory, and GCC used to implement the draft
df01da37 5079 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
5080 controls this instead of DEFAULT_ABI; V.4 targets needing backward
5081 compatibility can change DRAFT_V4_STRUCT_RET to override the
5082 default, and -m switches get the final word. See
5083 rs6000_override_options for more details.
5084
5085 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
5086 long double support is enabled. These values are returned in memory.
5087
5088 int_size_in_bytes returns -1 for variable size objects, which go in
5089 memory always. The cast to unsigned makes -1 > 8. */
5090
5091static bool
586de218 5092rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 5093{
594a51fe
SS
5094 /* In the darwin64 ABI, try to use registers for larger structs
5095 if possible. */
0b5383eb 5096 if (rs6000_darwin64_abi
594a51fe 5097 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
5098 && int_size_in_bytes (type) > 0)
5099 {
5100 CUMULATIVE_ARGS valcum;
5101 rtx valret;
5102
5103 valcum.words = 0;
5104 valcum.fregno = FP_ARG_MIN_REG;
5105 valcum.vregno = ALTIVEC_ARG_MIN_REG;
5106 /* Do a trial code generation as if this were going to be passed
5107 as an argument; if any part goes in memory, we return NULL. */
5108 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
5109 if (valret)
5110 return false;
5111 /* Otherwise fall through to more conventional ABI rules. */
5112 }
594a51fe 5113
c6e8c921 5114 if (AGGREGATE_TYPE_P (type)
df01da37 5115 && (aix_struct_return
c6e8c921
GK
5116 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
5117 return true;
b693336b 5118
bada2eb8
DE
5119 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5120 modes only exist for GCC vector types if -maltivec. */
5121 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
5122 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5123 return false;
5124
b693336b
PB
5125 /* Return synthetic vectors in memory. */
5126 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5127 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5128 {
5129 static bool warned_for_return_big_vectors = false;
5130 if (!warned_for_return_big_vectors)
5131 {
d4ee4d25 5132 warning (0, "GCC vector returned by reference: "
b693336b
PB
5133 "non-standard ABI extension with no compatibility guarantee");
5134 warned_for_return_big_vectors = true;
5135 }
5136 return true;
5137 }
5138
602ea4d3 5139 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 5140 return true;
ad630bef 5141
c6e8c921
GK
5142 return false;
5143}
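/* A worked example of the rules above (illustrative only): on a V.4
   target with aix_struct_return clear, a 6-byte struct has
   int_size_in_bytes == 6, the unsigned comparison 6 > 8 fails, and the
   value comes back in r3/r4.  A variable-sized type yields -1, which
   the cast to unsigned HOST_WIDE_INT turns into a huge value, so the
   > 8 test succeeds and the value is returned in memory.  */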
5144
4697a36c
MM
5145/* Initialize a variable CUM of type CUMULATIVE_ARGS
5146 for a call to a function whose data type is FNTYPE.
5147 For a library call, FNTYPE is 0.
5148
5149 For incoming args we set the number of arguments in the prototype large
1c20ae99 5150 so we never return a PARALLEL. */
4697a36c
MM
5151
5152void
f676971a 5153init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
5154 rtx libname ATTRIBUTE_UNUSED, int incoming,
5155 int libcall, int n_named_args)
4697a36c
MM
5156{
5157 static CUMULATIVE_ARGS zero_cumulative;
5158
5159 *cum = zero_cumulative;
5160 cum->words = 0;
5161 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 5162 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 5163 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
5164 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5165 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 5166 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
5167 cum->stdarg = fntype
5168 && (TYPE_ARG_TYPES (fntype) != 0
5169 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5170 != void_type_node));
4697a36c 5171
0f6937fe
AM
5172 cum->nargs_prototype = 0;
5173 if (incoming || cum->prototype)
5174 cum->nargs_prototype = n_named_args;
4697a36c 5175
a5c76ee6 5176 /* Check for a longcall attribute. */
3eb4e360
AM
5177 if ((!fntype && rs6000_default_long_calls)
5178 || (fntype
5179 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5180 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5181 cum->call_cookie |= CALL_LONG;
6a4cee5f 5182
4697a36c
MM
5183 if (TARGET_DEBUG_ARG)
5184 {
5185 fprintf (stderr, "\ninit_cumulative_args:");
5186 if (fntype)
5187 {
5188 tree ret_type = TREE_TYPE (fntype);
5189 fprintf (stderr, " ret code = %s,",
5190 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5191 }
5192
6a4cee5f
MM
5193 if (cum->call_cookie & CALL_LONG)
5194 fprintf (stderr, " longcall,");
5195
4697a36c
MM
5196 fprintf (stderr, " proto = %d, nargs = %d\n",
5197 cum->prototype, cum->nargs_prototype);
5198 }
f676971a 5199
c4ad648e
AM
5200 if (fntype
5201 && !TARGET_ALTIVEC
5202 && TARGET_ALTIVEC_ABI
5203 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5204 {
c85ce869 5205 error ("cannot return value in vector register because"
c4ad648e 5206 " altivec instructions are disabled, use -maltivec"
c85ce869 5207 " to enable them");
c4ad648e 5208 }
4697a36c
MM
5209}
5210\f
fe984136
RH
5211/* Return true if TYPE must be passed on the stack and not in registers. */
5212
5213static bool
586de218 5214rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
5215{
5216 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5217 return must_pass_in_stack_var_size (mode, type);
5218 else
5219 return must_pass_in_stack_var_size_or_pad (mode, type);
5220}
5221
c229cba9
DE
5222/* If defined, a C expression which determines whether, and in which
5223 direction, to pad out an argument with extra space. The value
5224 should be of type `enum direction': either `upward' to pad above
5225 the argument, `downward' to pad below, or `none' to inhibit
5226 padding.
5227
5228 For the AIX ABI, structs are always stored left-shifted in their
5229 argument slot. */
5230
9ebbca7d 5231enum direction
586de218 5232function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 5233{
6e985040
AM
5234#ifndef AGGREGATE_PADDING_FIXED
5235#define AGGREGATE_PADDING_FIXED 0
5236#endif
5237#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5238#define AGGREGATES_PAD_UPWARD_ALWAYS 0
5239#endif
5240
5241 if (!AGGREGATE_PADDING_FIXED)
5242 {
5243 /* GCC used to pass structures of the same size as integer types as
5244 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 5245 That is, structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
5246 passed padded downward, except that -mstrict-align further
5247 muddied the water in that multi-component structures of 2 and 4
5248 bytes in size were passed padded upward.
5249
5250 The following arranges for best compatibility with previous
5251 versions of gcc, but removes the -mstrict-align dependency. */
5252 if (BYTES_BIG_ENDIAN)
5253 {
5254 HOST_WIDE_INT size = 0;
5255
5256 if (mode == BLKmode)
5257 {
5258 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5259 size = int_size_in_bytes (type);
5260 }
5261 else
5262 size = GET_MODE_SIZE (mode);
5263
5264 if (size == 1 || size == 2 || size == 4)
5265 return downward;
5266 }
5267 return upward;
5268 }
5269
5270 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5271 {
5272 if (type != 0 && AGGREGATE_TYPE_P (type))
5273 return upward;
5274 }
c229cba9 5275
d3704c46
KH
5276 /* Fall back to the default. */
5277 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
5278}
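/* For illustration, on a big-endian target with the default
   !AGGREGATE_PADDING_FIXED behavior above: a struct of size 1, 2 or 4
   bytes is padded downward (padding below the argument), while a
   3-byte struct falls through to the usual upward padding, matching
   the compatibility behavior described in the comment above.  */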
5279
b6c9286a 5280/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 5281 of an argument with the specified mode and type. If it is not defined,
b6c9286a 5282 PARM_BOUNDARY is used for all arguments.
f676971a 5283
84e9ad15
AM
5284 V.4 wants long longs and doubles to be double word aligned. Just
5285 testing the mode size is a boneheaded way to do this as it means
5286 that other types such as complex int are also double word aligned.
5287 However, we're stuck with this because changing the ABI might break
5288 existing library interfaces.
5289
b693336b
PB
5290 Doubleword align SPE vectors.
5291 Quadword align Altivec vectors.
5292 Quadword align large synthetic vector types. */
b6c9286a
MM
5293
5294int
b693336b 5295function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5296{
84e9ad15
AM
5297 if (DEFAULT_ABI == ABI_V4
5298 && (GET_MODE_SIZE (mode) == 8
5299 || (TARGET_HARD_FLOAT
5300 && TARGET_FPRS
7393f7f8 5301 && (mode == TFmode || mode == TDmode))))
4ed78545 5302 return 64;
ad630bef
DE
5303 else if (SPE_VECTOR_MODE (mode)
5304 || (type && TREE_CODE (type) == VECTOR_TYPE
5305 && int_size_in_bytes (type) >= 8
5306 && int_size_in_bytes (type) < 16))
e1f83b4d 5307 return 64;
ad630bef
DE
5308 else if (ALTIVEC_VECTOR_MODE (mode)
5309 || (type && TREE_CODE (type) == VECTOR_TYPE
5310 && int_size_in_bytes (type) >= 16))
0ac081f6 5311 return 128;
0b5383eb
DJ
5312 else if (rs6000_darwin64_abi && mode == BLKmode
5313 && type && TYPE_ALIGN (type) > 64)
5314 return 128;
9ebbca7d 5315 else
b6c9286a 5316 return PARM_BOUNDARY;
b6c9286a 5317}
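/* For illustration: under the V.4 ABI an 8-byte DFmode argument
   reports a 64-bit boundary here, an AltiVec V4SImode argument reports
   128 bits, and anything else falls back to PARM_BOUNDARY; callers
   such as rs6000_parm_start use this value to decide how many words
   to skip.  */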
c53bdcf5 5318
294bd182
AM
5319/* For a function parm of MODE and TYPE, return the starting word in
5320 the parameter area. NWORDS of the parameter area are already used. */
5321
5322static unsigned int
5323rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5324{
5325 unsigned int align;
5326 unsigned int parm_offset;
5327
5328 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5329 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5330 return nwords + (-(parm_offset + nwords) & align);
5331}
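/* Worked example, assuming 32-bit AIX/Darwin: parm_offset is 6 because
   the parameter save area begins 24 bytes into the frame.  For a
   16-byte AltiVec argument function_arg_boundary gives 128, so
   align == 3; with one word already used, -(6 + 1) & 3 == 1 and the
   argument starts at word 2, i.e. at frame offset 24 + 8 == 32, which
   is 16-byte aligned.  */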
5332
c53bdcf5
AM
5333/* Compute the size (in words) of a function argument. */
5334
5335static unsigned long
5336rs6000_arg_size (enum machine_mode mode, tree type)
5337{
5338 unsigned long size;
5339
5340 if (mode != BLKmode)
5341 size = GET_MODE_SIZE (mode);
5342 else
5343 size = int_size_in_bytes (type);
5344
5345 if (TARGET_32BIT)
5346 return (size + 3) >> 2;
5347 else
5348 return (size + 7) >> 3;
5349}
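/* For illustration: a 10-byte BLKmode argument occupies
   (10 + 3) >> 2 == 3 parameter words on a 32-bit target and
   (10 + 7) >> 3 == 2 doublewords on a 64-bit target; the shifts simply
   round the size up to a whole number of registers.  */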
b6c9286a 5350\f
0b5383eb 5351/* Use this to flush pending int fields. */
594a51fe
SS
5352
5353static void
0b5383eb
DJ
5354rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5355 HOST_WIDE_INT bitpos)
594a51fe 5356{
0b5383eb
DJ
5357 unsigned int startbit, endbit;
5358 int intregs, intoffset;
5359 enum machine_mode mode;
594a51fe 5360
0b5383eb
DJ
5361 if (cum->intoffset == -1)
5362 return;
594a51fe 5363
0b5383eb
DJ
5364 intoffset = cum->intoffset;
5365 cum->intoffset = -1;
5366
5367 if (intoffset % BITS_PER_WORD != 0)
5368 {
5369 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5370 MODE_INT, 0);
5371 if (mode == BLKmode)
594a51fe 5372 {
0b5383eb
DJ
5373 /* We couldn't find an appropriate mode, which happens,
5374 e.g., in packed structs when there are 3 bytes to load.
5375 Force intoffset back to the beginning of the word in this
5376 case. */
5377 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5378 }
594a51fe 5379 }
0b5383eb
DJ
5380
5381 startbit = intoffset & -BITS_PER_WORD;
5382 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5383 intregs = (endbit - startbit) / BITS_PER_WORD;
5384 cum->words += intregs;
5385}
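/* Worked example, assuming the 64-bit Darwin ABI (BITS_PER_WORD == 64):
   with pending int fields starting at bit 32 and a flush requested at
   bit 160, startbit == 0, endbit == (160 + 63) & -64 == 192, so
   intregs == 3 and three words are charged to cum->words.  */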
5386
5387/* The darwin64 ABI calls for us to recurse down through structs,
5388 looking for elements passed in registers. Unfortunately, we have
5389 to track int register count here also because of misalignments
5390 in powerpc alignment mode. */
5391
5392static void
5393rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5394 tree type,
5395 HOST_WIDE_INT startbitpos)
5396{
5397 tree f;
5398
5399 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5400 if (TREE_CODE (f) == FIELD_DECL)
5401 {
5402 HOST_WIDE_INT bitpos = startbitpos;
5403 tree ftype = TREE_TYPE (f);
70fb00df
AP
5404 enum machine_mode mode;
5405 if (ftype == error_mark_node)
5406 continue;
5407 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5408
5409 if (DECL_SIZE (f) != 0
5410 && host_integerp (bit_position (f), 1))
5411 bitpos += int_bit_position (f);
5412
5413 /* ??? FIXME: else assume zero offset. */
5414
5415 if (TREE_CODE (ftype) == RECORD_TYPE)
5416 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5417 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5418 {
5419 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5420 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5421 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5422 }
5423 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5424 {
5425 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5426 cum->vregno++;
5427 cum->words += 2;
5428 }
5429 else if (cum->intoffset == -1)
5430 cum->intoffset = bitpos;
5431 }
594a51fe
SS
5432}
5433
4697a36c
MM
5434/* Update the data in CUM to advance over an argument
5435 of mode MODE and data type TYPE.
b2d04ecf
AM
5436 (TYPE is null for libcalls where that information may not be available.)
5437
5438 Note that for args passed by reference, function_arg will be called
5439 with MODE and TYPE set to that of the pointer to the arg, not the arg
5440 itself. */
4697a36c
MM
5441
5442void
f676971a 5443function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5444 tree type, int named, int depth)
4697a36c 5445{
0b5383eb
DJ
5446 int size;
5447
594a51fe
SS
5448 /* Only tick off an argument if we're not recursing. */
5449 if (depth == 0)
5450 cum->nargs_prototype--;
4697a36c 5451
ad630bef
DE
5452 if (TARGET_ALTIVEC_ABI
5453 && (ALTIVEC_VECTOR_MODE (mode)
5454 || (type && TREE_CODE (type) == VECTOR_TYPE
5455 && int_size_in_bytes (type) == 16)))
0ac081f6 5456 {
4ed78545
AM
5457 bool stack = false;
5458
2858f73a 5459 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5460 {
6d0ef01e
HP
5461 cum->vregno++;
5462 if (!TARGET_ALTIVEC)
c85ce869 5463 error ("cannot pass argument in vector register because"
6d0ef01e 5464 " altivec instructions are disabled, use -maltivec"
c85ce869 5465 " to enable them");
4ed78545
AM
5466
5467 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5468 even if it is going to be passed in a vector register.
4ed78545
AM
5469 Darwin does the same for variable-argument functions. */
5470 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5471 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5472 stack = true;
6d0ef01e 5473 }
4ed78545
AM
5474 else
5475 stack = true;
5476
5477 if (stack)
c4ad648e 5478 {
a594a19c 5479 int align;
f676971a 5480
2858f73a
GK
5481 /* Vector parameters must be 16-byte aligned. This places
5482 them at 2 mod 4 in terms of words in 32-bit mode, since
5483 the parameter save area starts at offset 24 from the
5484 stack. In 64-bit mode, they just have to start on an
5485 even word, since the parameter save area is 16-byte
5486 aligned. Space for GPRs is reserved even if the argument
5487 will be passed in memory. */
5488 if (TARGET_32BIT)
4ed78545 5489 align = (2 - cum->words) & 3;
2858f73a
GK
5490 else
5491 align = cum->words & 1;
c53bdcf5 5492 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5493
a594a19c
GK
5494 if (TARGET_DEBUG_ARG)
5495 {
f676971a 5496 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5497 cum->words, align);
5498 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5499 cum->nargs_prototype, cum->prototype,
2858f73a 5500 GET_MODE_NAME (mode));
a594a19c
GK
5501 }
5502 }
0ac081f6 5503 }
a4b0320c 5504 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5505 && !cum->stdarg
5506 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5507 cum->sysv_gregno++;
594a51fe
SS
5508
5509 else if (rs6000_darwin64_abi
5510 && mode == BLKmode
0b5383eb
DJ
5511 && TREE_CODE (type) == RECORD_TYPE
5512 && (size = int_size_in_bytes (type)) > 0)
5513 {
5514 /* Variable sized types have size == -1 and are
5515 treated as if consisting entirely of ints.
5516 Pad to 16 byte boundary if needed. */
5517 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5518 && (cum->words % 2) != 0)
5519 cum->words++;
5520 /* For varargs, we can just go up by the size of the struct. */
5521 if (!named)
5522 cum->words += (size + 7) / 8;
5523 else
5524 {
5525 /* It is tempting to say int register count just goes up by
5526 sizeof(type)/8, but this is wrong in a case such as
5527 { int; double; int; } [powerpc alignment]. We have to
5528 grovel through the fields for these too. */
5529 cum->intoffset = 0;
5530 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5531 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5532 size * BITS_PER_UNIT);
5533 }
5534 }
f607bc57 5535 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5536 {
a3170dc6 5537 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5538 && (mode == SFmode || mode == DFmode
e41b2a33 5539 || mode == SDmode || mode == DDmode || mode == TDmode
602ea4d3 5540 || (mode == TFmode && !TARGET_IEEEQUAD)))
4697a36c 5541 {
2d83f070
JJ
5542 /* _Decimal128 must use an even/odd register pair. This assumes
5543 that the register number is odd when fregno is odd. */
5544 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5545 cum->fregno++;
5546
5547 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5548 <= FP_ARG_V4_MAX_REG)
602ea4d3 5549 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5550 else
5551 {
602ea4d3 5552 cum->fregno = FP_ARG_V4_MAX_REG + 1;
4d4447b5
PB
5553 if (mode == DFmode || mode == TFmode
5554 || mode == DDmode || mode == TDmode)
c4ad648e 5555 cum->words += cum->words & 1;
c53bdcf5 5556 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5557 }
4697a36c 5558 }
4cc833b7
RH
5559 else
5560 {
b2d04ecf 5561 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5562 int gregno = cum->sysv_gregno;
5563
4ed78545
AM
5564 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5565 (r7,r8) or (r9,r10). As does any other 2 word item such
5566 as complex int due to a historical mistake. */
5567 if (n_words == 2)
5568 gregno += (1 - gregno) & 1;
4cc833b7 5569
4ed78545 5570 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5571 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5572 {
4ed78545
AM
5573 /* Long long and SPE vectors are aligned on the stack.
5574 So are other 2 word items such as complex int due to
5575 a historical mistake. */
4cc833b7
RH
5576 if (n_words == 2)
5577 cum->words += cum->words & 1;
5578 cum->words += n_words;
5579 }
4697a36c 5580
4cc833b7
RH
5581 /* Note: continuing to accumulate gregno past when we've started
5582 spilling to the stack indicates the fact that we've started
5583 spilling to the stack to expand_builtin_saveregs. */
5584 cum->sysv_gregno = gregno + n_words;
5585 }
4697a36c 5586
4cc833b7
RH
5587 if (TARGET_DEBUG_ARG)
5588 {
5589 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5590 cum->words, cum->fregno);
5591 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5592 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5593 fprintf (stderr, "mode = %4s, named = %d\n",
5594 GET_MODE_NAME (mode), named);
5595 }
4697a36c
MM
5596 }
5597 else
4cc833b7 5598 {
b2d04ecf 5599 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5600 int start_words = cum->words;
5601 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5602
294bd182 5603 cum->words = align_words + n_words;
4697a36c 5604
ebb109ad 5605 if (SCALAR_FLOAT_MODE_P (mode)
a3170dc6 5606 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5607 {
5608 /* _Decimal128 must be passed in an even/odd float register pair.
5609 This assumes that the register number is odd when fregno is
5610 odd. */
5611 if (mode == TDmode && (cum->fregno % 2) == 1)
5612 cum->fregno++;
5613 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5614 }
4cc833b7
RH
5615
5616 if (TARGET_DEBUG_ARG)
5617 {
5618 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5619 cum->words, cum->fregno);
5620 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5621 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5622 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5623 named, align_words - start_words, depth);
4cc833b7
RH
5624 }
5625 }
4697a36c 5626}
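/* For illustration of the register pairing used above: with
   GP_ARG_MIN_REG == 3, the expression gregno += (1 - gregno) & 1
   leaves an odd gregno alone (r3 stays r3) and bumps an even one
   (r4 becomes r5), so two-word arguments always land in (r3,r4),
   (r5,r6), (r7,r8) or (r9,r10) as the comments describe.  */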
a6c9bed4 5627
f82f556d
AH
5628static rtx
5629spe_build_register_parallel (enum machine_mode mode, int gregno)
5630{
17caeff2 5631 rtx r1, r3, r5, r7;
f82f556d 5632
37409796 5633 switch (mode)
f82f556d 5634 {
37409796 5635 case DFmode:
4d4447b5 5636 case DDmode:
54b695e7
AH
5637 r1 = gen_rtx_REG (DImode, gregno);
5638 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5639 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5640
5641 case DCmode:
17caeff2 5642 case TFmode:
4d4447b5 5643 case TDmode:
54b695e7
AH
5644 r1 = gen_rtx_REG (DImode, gregno);
5645 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5646 r3 = gen_rtx_REG (DImode, gregno + 2);
5647 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5648 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5649
17caeff2
JM
5650 case TCmode:
5651 r1 = gen_rtx_REG (DImode, gregno);
5652 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5653 r3 = gen_rtx_REG (DImode, gregno + 2);
5654 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5655 r5 = gen_rtx_REG (DImode, gregno + 4);
5656 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5657 r7 = gen_rtx_REG (DImode, gregno + 6);
5658 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5659 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5660
37409796
NS
5661 default:
5662 gcc_unreachable ();
f82f556d 5663 }
f82f556d 5664}
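/* For illustration: for DCmode starting at gregno == 5 the routine
   above returns
     (parallel [(expr_list (reg:DI 5) (const_int 0))
                (expr_list (reg:DI 7) (const_int 8))])
   i.e. two 64-bit GPR pieces covering byte offsets 0 and 8 of the
   complex double.  */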
b78d48dd 5665
f82f556d 5666/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5667static rtx
f676971a 5668rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5669 tree type)
a6c9bed4 5670{
f82f556d
AH
5671 int gregno = cum->sysv_gregno;
5672
5673 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5674 doubles are passed and returned in a pair of GPRs for ABI compatibility. */
4d4447b5
PB
5675 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
5676 || mode == DDmode || mode == TDmode
5677 || mode == DCmode || mode == TCmode))
f82f556d 5678 {
b5870bee
AH
5679 int n_words = rs6000_arg_size (mode, type);
5680
f82f556d 5681 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4d4447b5 5682 if (mode == DFmode || mode == DDmode)
b5870bee 5683 gregno += (1 - gregno) & 1;
f82f556d 5684
b5870bee
AH
5685 /* Multi-reg args are not split between registers and stack. */
5686 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5687 return NULL_RTX;
5688
5689 return spe_build_register_parallel (mode, gregno);
5690 }
a6c9bed4
AH
5691 if (cum->stdarg)
5692 {
c53bdcf5 5693 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5694
5695 /* SPE vectors are put in odd registers. */
5696 if (n_words == 2 && (gregno & 1) == 0)
5697 gregno += 1;
5698
5699 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5700 {
5701 rtx r1, r2;
5702 enum machine_mode m = SImode;
5703
5704 r1 = gen_rtx_REG (m, gregno);
5705 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5706 r2 = gen_rtx_REG (m, gregno + 1);
5707 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5708 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5709 }
5710 else
b78d48dd 5711 return NULL_RTX;
a6c9bed4
AH
5712 }
5713 else
5714 {
f82f556d
AH
5715 if (gregno <= GP_ARG_MAX_REG)
5716 return gen_rtx_REG (mode, gregno);
a6c9bed4 5717 else
b78d48dd 5718 return NULL_RTX;
a6c9bed4
AH
5719 }
5720}
5721
0b5383eb
DJ
5722/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5723 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5724
0b5383eb 5725static void
bb8df8a6 5726rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5727 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5728{
0b5383eb
DJ
5729 enum machine_mode mode;
5730 unsigned int regno;
5731 unsigned int startbit, endbit;
5732 int this_regno, intregs, intoffset;
5733 rtx reg;
594a51fe 5734
0b5383eb
DJ
5735 if (cum->intoffset == -1)
5736 return;
5737
5738 intoffset = cum->intoffset;
5739 cum->intoffset = -1;
5740
5741 /* If this is the trailing part of a word, try to only load that
5742 much into the register. Otherwise load the whole register. Note
5743 that in the latter case we may pick up unwanted bits. It's not a
5744 problem at the moment, but we may wish to revisit this. */
5745
5746 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5747 {
0b5383eb
DJ
5748 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5749 MODE_INT, 0);
5750 if (mode == BLKmode)
5751 {
5752 /* We couldn't find an appropriate mode, which happens,
5753 e.g., in packed structs when there are 3 bytes to load.
5754 Force intoffset back to the beginning of the word in this
5755 case. */
5756 intoffset = intoffset & -BITS_PER_WORD;
5757 mode = word_mode;
5758 }
5759 }
5760 else
5761 mode = word_mode;
5762
5763 startbit = intoffset & -BITS_PER_WORD;
5764 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5765 intregs = (endbit - startbit) / BITS_PER_WORD;
5766 this_regno = cum->words + intoffset / BITS_PER_WORD;
5767
5768 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
5769 cum->use_stack = 1;
bb8df8a6 5770
0b5383eb
DJ
5771 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
5772 if (intregs <= 0)
5773 return;
5774
5775 intoffset /= BITS_PER_UNIT;
5776 do
5777 {
5778 regno = GP_ARG_MIN_REG + this_regno;
5779 reg = gen_rtx_REG (mode, regno);
5780 rvec[(*k)++] =
5781 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5782
5783 this_regno += 1;
5784 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
5785 mode = word_mode;
5786 intregs -= 1;
5787 }
5788 while (intregs > 0);
5789}
5790
5791/* Recursive workhorse for the following. */
5792
5793static void
586de218 5794rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
5795 HOST_WIDE_INT startbitpos, rtx rvec[],
5796 int *k)
5797{
5798 tree f;
5799
5800 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5801 if (TREE_CODE (f) == FIELD_DECL)
5802 {
5803 HOST_WIDE_INT bitpos = startbitpos;
5804 tree ftype = TREE_TYPE (f);
70fb00df
AP
5805 enum machine_mode mode;
5806 if (ftype == error_mark_node)
5807 continue;
5808 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5809
5810 if (DECL_SIZE (f) != 0
5811 && host_integerp (bit_position (f), 1))
5812 bitpos += int_bit_position (f);
5813
5814 /* ??? FIXME: else assume zero offset. */
5815
5816 if (TREE_CODE (ftype) == RECORD_TYPE)
5817 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5818 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 5819 {
0b5383eb
DJ
5820#if 0
5821 switch (mode)
594a51fe 5822 {
0b5383eb
DJ
5823 case SCmode: mode = SFmode; break;
5824 case DCmode: mode = DFmode; break;
5825 case TCmode: mode = TFmode; break;
5826 default: break;
594a51fe 5827 }
0b5383eb
DJ
5828#endif
5829 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5830 rvec[(*k)++]
bb8df8a6 5831 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
5832 gen_rtx_REG (mode, cum->fregno++),
5833 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 5834 if (mode == TFmode || mode == TDmode)
0b5383eb 5835 cum->fregno++;
594a51fe 5836 }
0b5383eb
DJ
5837 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5838 {
5839 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5840 rvec[(*k)++]
bb8df8a6
EC
5841 = gen_rtx_EXPR_LIST (VOIDmode,
5842 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
5843 GEN_INT (bitpos / BITS_PER_UNIT));
5844 }
5845 else if (cum->intoffset == -1)
5846 cum->intoffset = bitpos;
5847 }
5848}
594a51fe 5849
0b5383eb
DJ
5850/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5851 the register(s) to be used for each field and subfield of a struct
5852 being passed by value, along with the offset of where the
5853 register's value may be found in the block. FP fields go in FP
5854 registers, vector fields go in vector registers, and everything
bb8df8a6 5855 else goes in int registers, packed as in memory.
8ff40a74 5856
0b5383eb
DJ
5857 This code is also used for function return values. RETVAL indicates
5858 whether this is the case.
8ff40a74 5859
a4d05547 5860 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 5861 calling convention. */
594a51fe 5862
0b5383eb 5863static rtx
586de218 5864rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
5865 int named, bool retval)
5866{
5867 rtx rvec[FIRST_PSEUDO_REGISTER];
5868 int k = 1, kbase = 1;
5869 HOST_WIDE_INT typesize = int_size_in_bytes (type);
5870 /* This is a copy; modifications are not visible to our caller. */
5871 CUMULATIVE_ARGS copy_cum = *orig_cum;
5872 CUMULATIVE_ARGS *cum = &copy_cum;
5873
5874 /* Pad to 16 byte boundary if needed. */
5875 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5876 && (cum->words % 2) != 0)
5877 cum->words++;
5878
5879 cum->intoffset = 0;
5880 cum->use_stack = 0;
5881 cum->named = named;
5882
5883 /* Put entries into rvec[] for individual FP and vector fields, and
5884 for the chunks of memory that go in int regs. Note we start at
5885 element 1; 0 is reserved for an indication of using memory, and
5886 may or may not be filled in below. */
5887 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
5888 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
5889
5890 /* If any part of the struct went on the stack put all of it there.
5891 This hack is because the generic code for
5892 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
5893 parts of the struct are not at the beginning. */
5894 if (cum->use_stack)
5895 {
5896 if (retval)
5897 return NULL_RTX; /* doesn't go in registers at all */
5898 kbase = 0;
5899 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5900 }
5901 if (k > 1 || cum->use_stack)
5902 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
5903 else
5904 return NULL_RTX;
5905}
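/* For illustration, assuming the 64-bit Darwin ABI and a named
   argument of type struct { double d; int i; }: the recursion above
   records an FPR for d at byte offset 0, the final flush records a GPR
   word covering i at byte offset 8, and the result is a two-element
   PARALLEL.  Had any piece spilled to memory, element 0 would instead
   carry the NULL_RTX marker and the whole struct would also be placed
   on the stack.  */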
5906
b78d48dd
FJ
5907/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
5908
5909static rtx
ec6376ab 5910rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 5911{
ec6376ab
AM
5912 int n_units;
5913 int i, k;
5914 rtx rvec[GP_ARG_NUM_REG + 1];
5915
5916 if (align_words >= GP_ARG_NUM_REG)
5917 return NULL_RTX;
5918
5919 n_units = rs6000_arg_size (mode, type);
5920
5921 /* Optimize the simple case where the arg fits in one gpr, except in
5922 the case of BLKmode due to assign_parms assuming that registers are
5923 BITS_PER_WORD wide. */
5924 if (n_units == 0
5925 || (n_units == 1 && mode != BLKmode))
5926 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5927
5928 k = 0;
5929 if (align_words + n_units > GP_ARG_NUM_REG)
5930 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5931 using a magic NULL_RTX component.
79773478
AM
5932 This is not strictly correct. Only some of the arg belongs in
5933 memory, not all of it. However, the normal scheme using
5934 function_arg_partial_nregs can result in unusual subregs, e.g.
5935 (subreg:SI (reg:DF) 4), which are not handled well. The code to
5936 store the whole arg to memory is often more efficient than code
5937 to store pieces, and we know that space is available in the right
5938 place for the whole arg. */
ec6376ab
AM
5939 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5940
5941 i = 0;
5942 do
36a454e1 5943 {
ec6376ab
AM
5944 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5945 rtx off = GEN_INT (i++ * 4);
5946 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 5947 }
ec6376ab
AM
5948 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5949
5950 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
5951}
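/* For illustration: with 8 argument GPRs, a DImode argument arriving
   at align_words == 7 has n_units == 2, so the PARALLEL built above
   holds the NULL_RTX memory marker plus a single (reg:SI 10) piece at
   offset 0; the second half of the value lives only in the parameter
   save area.  */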
5952
4697a36c
MM
5953/* Determine where to put an argument to a function.
5954 Value is zero to push the argument on the stack,
5955 or a hard register in which to store the argument.
5956
5957 MODE is the argument's machine mode.
5958 TYPE is the data type of the argument (as a tree).
5959 This is null for libcalls where that information may
5960 not be available.
5961 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
5962 the preceding args and about the function being called. It is
5963 not modified in this routine.
4697a36c
MM
5964 NAMED is nonzero if this argument is a named parameter
5965 (otherwise it is an extra parameter matching an ellipsis).
5966
5967 On RS/6000 the first eight words of non-FP args are normally in registers
5968 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
5969 Under V.4, the first 8 FP args are in registers.
5970
5971 If this is floating-point and no prototype is specified, we use
5972 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 5973 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 5974 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
5975 doesn't support PARALLEL anyway.
5976
5977 Note that for args passed by reference, function_arg will be called
5978 with MODE and TYPE set to that of the pointer to the arg, not the arg
5979 itself. */
4697a36c 5980
9390387d 5981rtx
f676971a 5982function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5983 tree type, int named)
4697a36c 5984{
4cc833b7 5985 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 5986
a4f6c312
SS
5987 /* Return a marker to indicate whether CR1 needs to set or clear the
5988 bit that V.4 uses to say fp args were passed in registers.
5989 Assume that we don't need the marker for software floating point,
5990 or compiler generated library calls. */
4697a36c
MM
5991 if (mode == VOIDmode)
5992 {
f607bc57 5993 if (abi == ABI_V4
b9599e46 5994 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
5995 && (cum->stdarg
5996 || (cum->nargs_prototype < 0
5997 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 5998 {
a3170dc6
AH
5999 /* For the SPE, we need to crxor CR6 always. */
6000 if (TARGET_SPE_ABI)
6001 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
6002 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
6003 return GEN_INT (cum->call_cookie
6004 | ((cum->fregno == FP_ARG_MIN_REG)
6005 ? CALL_V4_SET_FP_ARGS
6006 : CALL_V4_CLEAR_FP_ARGS));
7509c759 6007 }
4697a36c 6008
7509c759 6009 return GEN_INT (cum->call_cookie);
4697a36c
MM
6010 }
6011
0b5383eb
DJ
6012 if (rs6000_darwin64_abi && mode == BLKmode
6013 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 6014 {
0b5383eb 6015 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
6016 if (rslt != NULL_RTX)
6017 return rslt;
6018 /* Else fall through to usual handling. */
6019 }
6020
2858f73a 6021 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
6022 if (TARGET_64BIT && ! cum->prototype)
6023 {
c4ad648e
AM
6024 /* Vector parameters get passed in vector register
6025 and also in GPRs or memory, in absence of prototype. */
6026 int align_words;
6027 rtx slot;
6028 align_words = (cum->words + 1) & ~1;
6029
6030 if (align_words >= GP_ARG_NUM_REG)
6031 {
6032 slot = NULL_RTX;
6033 }
6034 else
6035 {
6036 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6037 }
6038 return gen_rtx_PARALLEL (mode,
6039 gen_rtvec (2,
6040 gen_rtx_EXPR_LIST (VOIDmode,
6041 slot, const0_rtx),
6042 gen_rtx_EXPR_LIST (VOIDmode,
6043 gen_rtx_REG (mode, cum->vregno),
6044 const0_rtx)));
c72d6c26
HP
6045 }
6046 else
6047 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
6048 else if (TARGET_ALTIVEC_ABI
6049 && (ALTIVEC_VECTOR_MODE (mode)
6050 || (type && TREE_CODE (type) == VECTOR_TYPE
6051 && int_size_in_bytes (type) == 16)))
0ac081f6 6052 {
2858f73a 6053 if (named || abi == ABI_V4)
a594a19c 6054 return NULL_RTX;
0ac081f6 6055 else
a594a19c
GK
6056 {
6057 /* Vector parameters to varargs functions under AIX or Darwin
6058 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
6059 int align, align_words, n_words;
6060 enum machine_mode part_mode;
a594a19c
GK
6061
6062 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
6063 2 mod 4 in terms of words in 32-bit mode, since the parameter
6064 save area starts at offset 24 from the stack. In 64-bit mode,
6065 they just have to start on an even word, since the parameter
6066 save area is 16-byte aligned. */
6067 if (TARGET_32BIT)
4ed78545 6068 align = (2 - cum->words) & 3;
2858f73a
GK
6069 else
6070 align = cum->words & 1;
a594a19c
GK
6071 align_words = cum->words + align;
6072
6073 /* Out of registers? Memory, then. */
6074 if (align_words >= GP_ARG_NUM_REG)
6075 return NULL_RTX;
ec6376ab
AM
6076
6077 if (TARGET_32BIT && TARGET_POWERPC64)
6078 return rs6000_mixed_function_arg (mode, type, align_words);
6079
2858f73a
GK
6080 /* The vector value goes in GPRs. Only the part of the
6081 value in GPRs is reported here. */
ec6376ab
AM
6082 part_mode = mode;
6083 n_words = rs6000_arg_size (mode, type);
6084 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 6085 /* Fortunately, there are only two possibilities, the value
2858f73a
GK
6086 is either wholly in GPRs or half in GPRs and half not. */
6087 part_mode = DImode;
ec6376ab
AM
6088
6089 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 6090 }
0ac081f6 6091 }
f82f556d
AH
6092 else if (TARGET_SPE_ABI && TARGET_SPE
6093 && (SPE_VECTOR_MODE (mode)
18f63bfa 6094 || (TARGET_E500_DOUBLE && (mode == DFmode
7393f7f8 6095 || mode == DDmode
17caeff2
JM
6096 || mode == DCmode
6097 || mode == TFmode
7393f7f8 6098 || mode == TDmode
17caeff2 6099 || mode == TCmode))))
a6c9bed4 6100 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 6101
f607bc57 6102 else if (abi == ABI_V4)
4697a36c 6103 {
a3170dc6 6104 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 6105 && (mode == SFmode || mode == DFmode
7393f7f8 6106 || (mode == TFmode && !TARGET_IEEEQUAD)
e41b2a33 6107 || mode == SDmode || mode == DDmode || mode == TDmode))
4cc833b7 6108 {
2d83f070
JJ
6109 /* _Decimal128 must use an even/odd register pair. This assumes
6110 that the register number is odd when fregno is odd. */
6111 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
6112 cum->fregno++;
6113
6114 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
6115 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
6116 return gen_rtx_REG (mode, cum->fregno);
6117 else
b78d48dd 6118 return NULL_RTX;
4cc833b7
RH
6119 }
6120 else
6121 {
b2d04ecf 6122 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
6123 int gregno = cum->sysv_gregno;
6124
4ed78545
AM
6125 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
6126 (r7,r8) or (r9,r10). As does any other 2 word item such
6127 as complex int due to a historical mistake. */
6128 if (n_words == 2)
6129 gregno += (1 - gregno) & 1;
4cc833b7 6130
4ed78545 6131 /* Multi-reg args are not split between registers and stack. */
ec6376ab 6132 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 6133 return NULL_RTX;
ec6376ab
AM
6134
6135 if (TARGET_32BIT && TARGET_POWERPC64)
6136 return rs6000_mixed_function_arg (mode, type,
6137 gregno - GP_ARG_MIN_REG);
6138 return gen_rtx_REG (mode, gregno);
4cc833b7 6139 }
4697a36c 6140 }
4cc833b7
RH
6141 else
6142 {
294bd182 6143 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 6144
2d83f070
JJ
6145 /* _Decimal128 must be passed in an even/odd float register pair.
6146 This assumes that the register number is odd when fregno is odd. */
6147 if (mode == TDmode && (cum->fregno % 2) == 1)
6148 cum->fregno++;
6149
2858f73a 6150 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 6151 {
ec6376ab
AM
6152 rtx rvec[GP_ARG_NUM_REG + 1];
6153 rtx r;
6154 int k;
c53bdcf5
AM
6155 bool needs_psave;
6156 enum machine_mode fmode = mode;
c53bdcf5
AM
6157 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6158
6159 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6160 {
c53bdcf5
AM
6161 /* Currently, we only ever need one reg here because complex
6162 doubles are split. */
7393f7f8
BE
6163 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6164 && (fmode == TFmode || fmode == TDmode));
ec6376ab 6165
7393f7f8
BE
6166 /* Long double or _Decimal128 split over regs and memory. */
6167 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 6168 }
c53bdcf5
AM
6169
6170 /* Do we also need to pass this arg in the parameter save
6171 area? */
6172 needs_psave = (type
6173 && (cum->nargs_prototype <= 0
6174 || (DEFAULT_ABI == ABI_AIX
de17c25f 6175 && TARGET_XL_COMPAT
c53bdcf5
AM
6176 && align_words >= GP_ARG_NUM_REG)));
6177
6178 if (!needs_psave && mode == fmode)
ec6376ab 6179 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 6180
ec6376ab 6181 k = 0;
c53bdcf5
AM
6182 if (needs_psave)
6183 {
ec6376ab 6184 /* Describe the part that goes in gprs or the stack.
c53bdcf5 6185 This piece must come first, before the fprs. */
c53bdcf5
AM
6186 if (align_words < GP_ARG_NUM_REG)
6187 {
6188 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
6189
6190 if (align_words + n_words > GP_ARG_NUM_REG
6191 || (TARGET_32BIT && TARGET_POWERPC64))
6192 {
6193 /* If this is partially on the stack, then we only
6194 include the portion actually in registers here. */
6195 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6196 rtx off;
79773478
AM
6197 int i = 0;
6198 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
6199 /* Not all of the arg fits in gprs. Say that it
6200 goes in memory too, using a magic NULL_RTX
6201 component. Also see comment in
6202 rs6000_mixed_function_arg for why the normal
6203 function_arg_partial_nregs scheme doesn't work
6204 in this case. */
6205 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6206 const0_rtx);
ec6376ab
AM
6207 do
6208 {
6209 r = gen_rtx_REG (rmode,
6210 GP_ARG_MIN_REG + align_words);
2e6c9641 6211 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
6212 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6213 }
6214 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6215 }
6216 else
6217 {
6218 /* The whole arg fits in gprs. */
6219 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6220 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6221 }
c53bdcf5 6222 }
ec6376ab
AM
6223 else
6224 /* It's entirely in memory. */
6225 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
6226 }
6227
ec6376ab
AM
6228 /* Describe where this piece goes in the fprs. */
6229 r = gen_rtx_REG (fmode, cum->fregno);
6230 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6231
6232 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
6233 }
6234 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 6235 {
ec6376ab
AM
6236 if (TARGET_32BIT && TARGET_POWERPC64)
6237 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 6238
4eeca74f
AM
6239 if (mode == BLKmode)
6240 mode = Pmode;
6241
b2d04ecf
AM
6242 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6243 }
4cc833b7
RH
6244 else
6245 return NULL_RTX;
4697a36c 6246 }
4697a36c
MM
6247}
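/* For illustration of the 32-bit vector alignment used above: with
   cum->words == 5, align == (2 - 5) & 3 == 1, so align_words == 6,
   which is 2 mod 4 and therefore lands on a 16-byte boundary once the
   24-byte offset of the parameter save area is added in.  */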
6248\f
ec6376ab 6249/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
6250 the number of bytes passed in registers. For args passed entirely in
6251 registers or entirely in memory, zero. When an arg is described by a
6252 PARALLEL, perhaps using more than one register type, this function
6253 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 6254
78a52f11
RH
6255static int
6256rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6257 tree type, bool named)
4697a36c 6258{
c53bdcf5 6259 int ret = 0;
ec6376ab 6260 int align_words;
c53bdcf5 6261
f607bc57 6262 if (DEFAULT_ABI == ABI_V4)
4697a36c 6263 return 0;
4697a36c 6264
c53bdcf5
AM
6265 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6266 && cum->nargs_prototype >= 0)
6267 return 0;
6268
0b5383eb
DJ
6269 /* In this complicated case we just disable the partial_nregs code. */
6270 if (rs6000_darwin64_abi && mode == BLKmode
6271 && TREE_CODE (type) == RECORD_TYPE
6272 && int_size_in_bytes (type) > 0)
6273 return 0;
6274
294bd182 6275 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6276
79773478
AM
6277 if (USE_FP_FOR_ARG_P (cum, mode, type))
6278 {
fb63c729
AM
6279 /* If we are passing this arg in the fixed parameter save area
6280 (gprs or memory) as well as fprs, then this function should
79773478
AM
6281 return the number of partial bytes passed in the parameter
6282 save area rather than partial bytes passed in fprs. */
6283 if (type
6284 && (cum->nargs_prototype <= 0
6285 || (DEFAULT_ABI == ABI_AIX
6286 && TARGET_XL_COMPAT
6287 && align_words >= GP_ARG_NUM_REG)))
6288 return 0;
6289 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6290 > FP_ARG_MAX_REG + 1)
ac7e839c 6291 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6292 else if (cum->nargs_prototype >= 0)
4697a36c
MM
6293 return 0;
6294 }
6295
ec6376ab
AM
6296 if (align_words < GP_ARG_NUM_REG
6297 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6298 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6299
c53bdcf5 6300 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6301 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6302
c53bdcf5 6303 return ret;
4697a36c
MM
6304}
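/* Illustrative sketch -- not part of the original rs6000.c -- of the
   situation rs6000_arg_partial_bytes describes: an argument that starts
   in the parameter GPRs and spills into the parameter save area.  The
   struct and prototype below are hypothetical and assume the 64-bit
   AIX/ELF conventions with eight parameter GPRs (r3-r10).  */

struct big_arg { long a, b, c, d, e, f; };      /* 48 bytes on 64-bit.  */
extern void callee (long x1, long x2, long x3, long x4, long x5,
                    struct big_arg big);
/* x1..x5 occupy r3..r7, so BIG starts at r8: 24 of its bytes travel in
   r8..r10 and the remaining 24 bytes go to the parameter save area.
   For BIG, rs6000_arg_partial_bytes would report 24.  */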
6305\f
6306/* A C expression that indicates when an argument must be passed by
6307 reference. If nonzero for an argument, a copy of that argument is
6308 made in memory and a pointer to the argument is passed instead of
6309 the argument itself. The pointer is passed in whatever way is
6310 appropriate for passing a pointer to that type.
6311
b2d04ecf
AM
6312 Under V.4, aggregates and long double are passed by reference.
6313
6314 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6315 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
6316
6317 As an extension to all ABIs, variable sized types are passed by
6318 reference. */
4697a36c 6319
8cd5a4e0 6320static bool
f676971a 6321rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6322 enum machine_mode mode, const_tree type,
bada2eb8 6323 bool named ATTRIBUTE_UNUSED)
4697a36c 6324{
602ea4d3 6325 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
6326 {
6327 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6328 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6329 return 1;
6330 }
6331
6332 if (!type)
6333 return 0;
4697a36c 6334
bada2eb8
DE
6335 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6336 {
6337 if (TARGET_DEBUG_ARG)
6338 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6339 return 1;
6340 }
6341
6342 if (int_size_in_bytes (type) < 0)
6343 {
6344 if (TARGET_DEBUG_ARG)
6345 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6346 return 1;
6347 }
6348
6349 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6350 modes only exist for GCC vector types if -maltivec. */
6351 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6352 {
6353 if (TARGET_DEBUG_ARG)
6354 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6355 return 1;
6356 }
b693336b
PB
6357
6358 /* Pass synthetic vectors in memory. */
bada2eb8 6359 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6360 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6361 {
6362 static bool warned_for_pass_big_vectors = false;
6363 if (TARGET_DEBUG_ARG)
6364 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6365 if (!warned_for_pass_big_vectors)
6366 {
d4ee4d25 6367 warning (0, "GCC vector passed by reference: "
b693336b
PB
6368 "non-standard ABI extension with no compatibility guarantee");
6369 warned_for_pass_big_vectors = true;
6370 }
6371 return 1;
6372 }
6373
b2d04ecf 6374 return 0;
4697a36c 6375}
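/* Illustrative sketch -- not part of the original rs6000.c -- of argument
   types that take the pass-by-reference paths above.  The declarations
   are hypothetical; the comments restate the rules from the function.  */

struct three_ints { int a, b, c; };
extern void takes_aggregate (struct three_ints s); /* V.4: aggregate, by reference.  */
extern void takes_long_double (long double x);     /* V.4 long double when it is IEEE
                                                      quad TFmode: by reference.  */
/* Variable-sized types and over-size GCC vectors likewise go by
   reference, the latter with the one-time warning emitted above.  */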
5985c7a6
FJ
6376
6377static void
2d9db8eb 6378rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6379{
6380 int i;
6381 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6382
6383 if (nregs == 0)
6384 return;
6385
c4ad648e 6386 for (i = 0; i < nregs; i++)
5985c7a6 6387 {
9390387d 6388 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6389 if (reload_completed)
c4ad648e
AM
6390 {
6391 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6392 tem = NULL_RTX;
6393 else
6394 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6395 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6396 }
5985c7a6
FJ
6397 else
6398 tem = replace_equiv_address (tem, XEXP (tem, 0));
6399
37409796 6400 gcc_assert (tem);
5985c7a6
FJ
6401
6402 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6403 }
6404}
4697a36c
MM
6405\f
6406/* Perform any needed actions for a function that is receiving a
f676971a 6407 variable number of arguments.
4697a36c
MM
6408
6409 CUM is as above.
6410
6411 MODE and TYPE are the mode and type of the current parameter.
6412
6413 PRETEND_SIZE is a variable that should be set to the amount of stack
6414 that must be pushed by the prolog to pretend that our caller pushed
6415 it.
6416
6417 Normally, this macro will push all remaining incoming registers on the
6418 stack and set PRETEND_SIZE to the length of the registers pushed. */
6419
c6e8c921 6420static void
f676971a 6421setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
6422 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6423 int no_rtl)
4697a36c 6424{
4cc833b7
RH
6425 CUMULATIVE_ARGS next_cum;
6426 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6427 rtx save_area = NULL_RTX, mem;
4862826d
ILT
6428 int first_reg_offset;
6429 alias_set_type set;
4697a36c 6430
f31bf321 6431 /* Skip the last named argument. */
d34c5b80 6432 next_cum = *cum;
594a51fe 6433 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6434
f607bc57 6435 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6436 {
5b667039
JJ
6437 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6438
60e2d0ca 6439 if (! no_rtl)
5b667039
JJ
6440 {
6441 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6442 HOST_WIDE_INT offset = 0;
6443
6444 /* Try to optimize the size of the varargs save area.
6445 The ABI requires that ap.reg_save_area is doubleword
6446 aligned, but we don't need to allocate space for all
6447 the bytes, only those in which we will actually save
6448 anything. */
6449 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6450 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6451 if (TARGET_HARD_FLOAT && TARGET_FPRS
6452 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6453 && cfun->va_list_fpr_size)
6454 {
6455 if (gpr_reg_num)
6456 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6457 * UNITS_PER_FP_WORD;
6458 if (cfun->va_list_fpr_size
6459 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6460 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6461 else
6462 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6463 * UNITS_PER_FP_WORD;
6464 }
6465 if (gpr_reg_num)
6466 {
6467 offset = -((first_reg_offset * reg_size) & ~7);
6468 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6469 {
6470 gpr_reg_num = cfun->va_list_gpr_size;
6471 if (reg_size == 4 && (first_reg_offset & 1))
6472 gpr_reg_num++;
6473 }
6474 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6475 }
6476 else if (fpr_size)
6477 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6478 * UNITS_PER_FP_WORD
6479 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6480
5b667039
JJ
6481 if (gpr_size + fpr_size)
6482 {
6483 rtx reg_save_area
6484 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6485 gcc_assert (GET_CODE (reg_save_area) == MEM);
6486 reg_save_area = XEXP (reg_save_area, 0);
6487 if (GET_CODE (reg_save_area) == PLUS)
6488 {
6489 gcc_assert (XEXP (reg_save_area, 0)
6490 == virtual_stack_vars_rtx);
6491 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6492 offset += INTVAL (XEXP (reg_save_area, 1));
6493 }
6494 else
6495 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6496 }
6497
6498 cfun->machine->varargs_save_offset = offset;
6499 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6500 }
4697a36c 6501 }
60e2d0ca 6502 else
4697a36c 6503 {
d34c5b80 6504 first_reg_offset = next_cum.words;
4cc833b7 6505 save_area = virtual_incoming_args_rtx;
4697a36c 6506
fe984136 6507 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6508 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6509 }
4697a36c 6510
dfafc897 6511 set = get_varargs_alias_set ();
9d30f3c1
JJ
6512 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6513 && cfun->va_list_gpr_size)
4cc833b7 6514 {
9d30f3c1
JJ
6515 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6516
6517 if (va_list_gpr_counter_field)
6518 {
6519 /* V4 va_list_gpr_size counts number of registers needed. */
6520 if (nregs > cfun->va_list_gpr_size)
6521 nregs = cfun->va_list_gpr_size;
6522 }
6523 else
6524 {
6525 /* char * va_list instead counts number of bytes needed. */
6526 if (nregs > cfun->va_list_gpr_size / reg_size)
6527 nregs = cfun->va_list_gpr_size / reg_size;
6528 }
6529
dfafc897 6530 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6531 plus_constant (save_area,
13e2e16e
DE
6532 first_reg_offset * reg_size));
6533 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6534 set_mem_alias_set (mem, set);
8ac61af7 6535 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6536
f676971a 6537 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6538 nregs);
4697a36c
MM
6539 }
6540
4697a36c 6541 /* Save FP registers if needed. */
f607bc57 6542 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6543 && TARGET_HARD_FLOAT && TARGET_FPRS
6544 && ! no_rtl
9d30f3c1
JJ
6545 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6546 && cfun->va_list_fpr_size)
4697a36c 6547 {
9d30f3c1 6548 int fregno = next_cum.fregno, nregs;
9ebbca7d 6549 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6550 rtx lab = gen_label_rtx ();
5b667039
JJ
6551 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6552 * UNITS_PER_FP_WORD);
4697a36c 6553
c4ad648e
AM
6554 emit_jump_insn
6555 (gen_rtx_SET (VOIDmode,
6556 pc_rtx,
6557 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6558 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6559 const0_rtx),
39403d82 6560 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6561 pc_rtx)));
6562
9d30f3c1
JJ
6563 for (nregs = 0;
6564 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6565 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6566 {
5496b36f 6567 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 6568 MEM_NOTRAP_P (mem) = 1;
c4ad648e 6569 set_mem_alias_set (mem, set);
94ff898d 6570 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 6571 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 6572 }
4cc833b7
RH
6573
6574 emit_label (lab);
4697a36c 6575 }
4697a36c 6576}
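/* Illustrative sketch -- not part of the original rs6000.c -- of the
   shape of the V.4 register save area that the code above fills for a
   32-bit varargs function when every slot is needed.  Hypothetical
   type, shown only to make the offset arithmetic above concrete.  */

struct v4_reg_save_area_sketch
{
  unsigned int gpr[8];  /* r3..r10, reg_size == 4 bytes each.  */
  double fpr[8];        /* f1..f8, stored only when the caller set the
                           condition-register bit tested above via CR1.  */
};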
4697a36c 6577
dfafc897 6578/* Create the va_list data type. */
2c4974b7 6579
c35d187f
RH
6580static tree
6581rs6000_build_builtin_va_list (void)
dfafc897 6582{
64c2816f 6583 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6584
9ebbca7d
GK
6585 /* For AIX, prefer 'char *' because that's what the system
6586 header files like. */
f607bc57 6587 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6588 return build_pointer_type (char_type_node);
dfafc897 6589
f1e639b1 6590 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6591 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6592
f676971a 6593 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6594 unsigned_char_type_node);
f676971a 6595 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6596 unsigned_char_type_node);
64c2816f
DT
6597 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6598 every user file. */
6599 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6600 short_unsigned_type_node);
dfafc897
FS
6601 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6602 ptr_type_node);
6603 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6604 ptr_type_node);
6605
9d30f3c1
JJ
6606 va_list_gpr_counter_field = f_gpr;
6607 va_list_fpr_counter_field = f_fpr;
6608
dfafc897
FS
6609 DECL_FIELD_CONTEXT (f_gpr) = record;
6610 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6611 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6612 DECL_FIELD_CONTEXT (f_ovf) = record;
6613 DECL_FIELD_CONTEXT (f_sav) = record;
6614
bab45a51
FS
6615 TREE_CHAIN (record) = type_decl;
6616 TYPE_NAME (record) = type_decl;
dfafc897
FS
6617 TYPE_FIELDS (record) = f_gpr;
6618 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6619 TREE_CHAIN (f_fpr) = f_res;
6620 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6621 TREE_CHAIN (f_ovf) = f_sav;
6622
6623 layout_type (record);
6624
6625 /* The correct type is an array type of one element. */
6626 return build_array_type (record, build_index_type (size_zero_node));
6627}
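/* Illustrative sketch -- not part of the original rs6000.c -- of the
   C-level shape of the V.4 va_list record built above; the field names
   and the array-of-one-element wrapper mirror the code, while the
   typedef names are hypothetical.  */

typedef struct __va_list_tag_sketch
{
  unsigned char gpr;            /* number of GPR argument slots consumed  */
  unsigned char fpr;            /* number of FPR argument slots consumed  */
  unsigned short reserved;      /* the named padding described above      */
  void *overflow_arg_area;      /* next argument in the caller's frame    */
  void *reg_save_area;          /* base of the register save area         */
} va_list_sketch[1];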
6628
6629/* Implement va_start. */
6630
d7bd8aeb 6631static void
a2369ed3 6632rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6633{
dfafc897 6634 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6635 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6636 tree gpr, fpr, ovf, sav, t;
2c4974b7 6637
dfafc897 6638 /* Only SVR4 needs something special. */
f607bc57 6639 if (DEFAULT_ABI != ABI_V4)
dfafc897 6640 {
e5faf155 6641 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6642 return;
6643 }
6644
973a648b 6645 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6646 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6647 f_res = TREE_CHAIN (f_fpr);
6648 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6649 f_sav = TREE_CHAIN (f_ovf);
6650
872a65b5 6651 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6652 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6653 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6654 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6655 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
dfafc897
FS
6656
6657 /* Count number of gp and fp argument registers used. */
4cc833b7 6658 words = current_function_args_info.words;
987732e0
DE
6659 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
6660 GP_ARG_NUM_REG);
6661 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
6662 FP_ARG_NUM_REG);
dfafc897
FS
6663
6664 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6665 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6666 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6667 words, n_gpr, n_fpr);
dfafc897 6668
9d30f3c1
JJ
6669 if (cfun->va_list_gpr_size)
6670 {
07beea0d 6671 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
47a25a46 6672 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6673 TREE_SIDE_EFFECTS (t) = 1;
6674 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6675 }
58c8adc1 6676
9d30f3c1
JJ
6677 if (cfun->va_list_fpr_size)
6678 {
07beea0d 6679 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
47a25a46 6680 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6681 TREE_SIDE_EFFECTS (t) = 1;
6682 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6683 }
dfafc897
FS
6684
6685 /* Find the overflow area. */
6686 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6687 if (words != 0)
5be014d5
AP
6688 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6689 size_int (words * UNITS_PER_WORD));
07beea0d 6690 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6691 TREE_SIDE_EFFECTS (t) = 1;
6692 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6693
9d30f3c1
JJ
6694 /* If there were no va_arg invocations, don't set up the register
6695 save area. */
6696 if (!cfun->va_list_gpr_size
6697 && !cfun->va_list_fpr_size
6698 && n_gpr < GP_ARG_NUM_REG
6699 && n_fpr < FP_ARG_V4_MAX_REG)
6700 return;
6701
dfafc897
FS
6702 /* Find the register save area. */
6703 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6704 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6705 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6706 size_int (cfun->machine->varargs_save_offset));
07beea0d 6707 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
dfafc897
FS
6708 TREE_SIDE_EFFECTS (t) = 1;
6709 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6710}
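/* Illustrative sketch -- not part of the original rs6000.c -- of what
   the va_start expansion above records for a simple V.4 varargs
   function.  Hypothetical user code.  */

#include <stdarg.h>

int sum_ints_sketch (int count, ...)
{
  va_list ap;
  int i, total = 0;

  /* COUNT arrives in r3, so n_gpr == 1 and n_fpr == 0 here: va_start
     stores 1 into the gpr field, 0 into the fpr field, points
     overflow_arg_area at the incoming stack arguments and
     reg_save_area at the area prepared by setup_incoming_varargs.  */
  va_start (ap, count);
  for (i = 0; i < count; i++)
    total += va_arg (ap, int);
  va_end (ap);
  return total;
}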
6711
6712/* Implement va_arg. */
6713
23a60a04
JM
6714tree
6715rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 6716{
cd3ce9b4
JM
6717 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6718 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6719 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6720 tree lab_false, lab_over, addr;
6721 int align;
6722 tree ptrtype = build_pointer_type (type);
7393f7f8 6723 int regalign = 0;
cd3ce9b4 6724
08b0dc1b
RH
6725 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6726 {
6727 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6728 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6729 }
6730
cd3ce9b4
JM
6731 if (DEFAULT_ABI != ABI_V4)
6732 {
08b0dc1b 6733 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6734 {
6735 tree elem_type = TREE_TYPE (type);
6736 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6737 int elem_size = GET_MODE_SIZE (elem_mode);
6738
6739 if (elem_size < UNITS_PER_WORD)
6740 {
23a60a04 6741 tree real_part, imag_part;
cd3ce9b4
JM
6742 tree post = NULL_TREE;
6743
23a60a04
JM
6744 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6745 &post);
6746 /* Copy the value into a temporary, lest the formal temporary
6747 be reused out from under us. */
6748 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
cd3ce9b4
JM
6749 append_to_statement_list (post, pre_p);
6750
23a60a04
JM
6751 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6752 post_p);
cd3ce9b4 6753
47a25a46 6754 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
6755 }
6756 }
6757
23a60a04 6758 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
6759 }
6760
6761 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6762 f_fpr = TREE_CHAIN (f_gpr);
6763 f_res = TREE_CHAIN (f_fpr);
6764 f_ovf = TREE_CHAIN (f_res);
6765 f_sav = TREE_CHAIN (f_ovf);
6766
872a65b5 6767 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6768 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6769 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6770 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6771 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
cd3ce9b4
JM
6772
6773 size = int_size_in_bytes (type);
6774 rsize = (size + 3) / 4;
6775 align = 1;
6776
08b0dc1b 6777 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3
JJ
6778 && (TYPE_MODE (type) == SFmode
6779 || TYPE_MODE (type) == DFmode
7393f7f8 6780 || TYPE_MODE (type) == TFmode
e41b2a33 6781 || TYPE_MODE (type) == SDmode
7393f7f8
BE
6782 || TYPE_MODE (type) == DDmode
6783 || TYPE_MODE (type) == TDmode))
cd3ce9b4
JM
6784 {
6785 /* FP args go in FP registers, if present. */
cd3ce9b4 6786 reg = fpr;
602ea4d3 6787 n_reg = (size + 7) / 8;
cd3ce9b4
JM
6788 sav_ofs = 8*4;
6789 sav_scale = 8;
e41b2a33 6790 if (TYPE_MODE (type) != SFmode && TYPE_MODE (type) != SDmode)
cd3ce9b4
JM
6791 align = 8;
6792 }
6793 else
6794 {
6795 /* Otherwise into GP registers. */
cd3ce9b4
JM
6796 reg = gpr;
6797 n_reg = rsize;
6798 sav_ofs = 0;
6799 sav_scale = 4;
6800 if (n_reg == 2)
6801 align = 8;
6802 }
6803
6804 /* Pull the value out of the saved registers.... */
6805
6806 lab_over = NULL;
6807 addr = create_tmp_var (ptr_type_node, "addr");
6808 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
6809
6810 /* AltiVec vectors never go in registers when -mabi=altivec. */
6811 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
6812 align = 16;
6813 else
6814 {
6815 lab_false = create_artificial_label ();
6816 lab_over = create_artificial_label ();
6817
6818 /* Long long and SPE vectors are aligned in the registers.
6819 As is any other 2-gpr item, such as complex int, due to a
6820 historical mistake. */
6821 u = reg;
602ea4d3 6822 if (n_reg == 2 && reg == gpr)
cd3ce9b4 6823 {
7393f7f8 6824 regalign = 1;
cd3ce9b4 6825 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
8fb632eb 6826 build_int_cst (TREE_TYPE (reg), n_reg - 1));
cd3ce9b4
JM
6827 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
6828 }
7393f7f8
BE
6829 /* _Decimal128 is passed in even/odd fpr pairs; the stored
6830 reg number is 0 for f1, so we want to make it odd. */
6831 else if (reg == fpr && TYPE_MODE (type) == TDmode)
6832 {
6833 regalign = 1;
6834 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), reg, size_int (1));
6835 u = build2 (MODIFY_EXPR, void_type_node, reg, t);
6836 }
cd3ce9b4 6837
95674810 6838 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
6839 t = build2 (GE_EXPR, boolean_type_node, u, t);
6840 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6841 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6842 gimplify_and_add (t, pre_p);
6843
6844 t = sav;
6845 if (sav_ofs)
5be014d5 6846 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 6847
8fb632eb
ZD
6848 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
6849 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
6850 u = fold_convert (sizetype, u);
6851 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
6852 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 6853
e41b2a33
PB
6854 /* _Decimal32 varargs are located in the second word of the 64-bit
6855 FP register for 32-bit binaries. */
6856 if (!TARGET_POWERPC64 && TYPE_MODE (type) == SDmode)
6857 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
6858
07beea0d 6859 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6860 gimplify_and_add (t, pre_p);
6861
6862 t = build1 (GOTO_EXPR, void_type_node, lab_over);
6863 gimplify_and_add (t, pre_p);
6864
6865 t = build1 (LABEL_EXPR, void_type_node, lab_false);
6866 append_to_statement_list (t, pre_p);
6867
7393f7f8 6868 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
6869 {
6870 /* Ensure that we don't find any more args in regs.
7393f7f8 6871 Alignment has taken care of the special cases. */
07beea0d 6872 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (reg), reg, size_int (8));
cd3ce9b4
JM
6873 gimplify_and_add (t, pre_p);
6874 }
6875 }
6876
6877 /* ... otherwise out of the overflow area. */
6878
6879 /* Care for on-stack alignment if needed. */
6880 t = ovf;
6881 if (align != 1)
6882 {
5be014d5
AP
6883 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
6884 t = fold_convert (sizetype, t);
4a90aeeb 6885 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
6886 size_int (-align));
6887 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
6888 }
6889 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6890
07beea0d 6891 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6892 gimplify_and_add (u, pre_p);
6893
5be014d5 6894 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
07beea0d 6895 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
cd3ce9b4
JM
6896 gimplify_and_add (t, pre_p);
6897
6898 if (lab_over)
6899 {
6900 t = build1 (LABEL_EXPR, void_type_node, lab_over);
6901 append_to_statement_list (t, pre_p);
6902 }
6903
0cfbc62b
JM
6904 if (STRICT_ALIGNMENT
6905 && (TYPE_ALIGN (type)
6906 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
6907 {
6908 /* The value (of type complex double, for example) may not be
6909 aligned in memory in the saved registers, so copy via a
6910 temporary. (This is the same code as used for SPARC.) */
6911 tree tmp = create_tmp_var (type, "va_arg_tmp");
6912 tree dest_addr = build_fold_addr_expr (tmp);
6913
5039610b
SL
6914 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
6915 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
6916
6917 gimplify_and_add (copy, pre_p);
6918 addr = dest_addr;
6919 }
6920
08b0dc1b 6921 addr = fold_convert (ptrtype, addr);
872a65b5 6922 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
6923}
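/* Illustrative sketch -- not part of the original rs6000.c -- of which
   of the va_arg paths gimplified above each fetch takes on V.4 with
   hard float.  Hypothetical user code.  */

#include <stdarg.h>

void va_arg_paths_sketch (va_list ap)
{
  int i = va_arg (ap, int);               /* GPR path: reg = gpr, sav_scale = 4.   */
  double d = va_arg (ap, double);         /* FPR path: reg = fpr, sav_scale = 8.   */
  long long ll = va_arg (ap, long long);  /* Two-GPR item, bumped to an even reg.  */

  (void) i; (void) d; (void) ll;
  /* Once the 8 GPR or 8 FPR slots are used up, the corresponding
     fetches fall through to the overflow_arg_area branch above.  */
}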
6924
0ac081f6
AH
6925/* Builtins. */
6926
58646b77
PB
6927static void
6928def_builtin (int mask, const char *name, tree type, int code)
6929{
96038623 6930 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
58646b77
PB
6931 {
6932 if (rs6000_builtin_decls[code])
6933 abort ();
6934
6935 rs6000_builtin_decls[code] =
c79efc4d
RÁE
6936 add_builtin_function (name, type, code, BUILT_IN_MD,
6937 NULL, NULL_TREE);
58646b77
PB
6938 }
6939}
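/* Illustrative sketch -- not part of the original rs6000.c -- showing
   how an entry from the bdesc tables below surfaces to users: each one
   is registered through def_builtin and can then be called directly.
   Hypothetical user code, compiled with -maltivec.  */

#include <altivec.h>

vector float vaddfp_sketch (vector float a, vector float b)
{
  /* Maps to the CODE_FOR_addv4sf3 entry in bdesc_2arg.  */
  return __builtin_altivec_vaddfp (a, b);
}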
0ac081f6 6940
24408032
AH
6941/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
6942
2212663f 6943static const struct builtin_description bdesc_3arg[] =
24408032
AH
6944{
6945 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
6946 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
6947 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
6948 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
6949 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
6950 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
6951 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
6952 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
6953 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
6954 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 6955 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
6956 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
6957 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
6958 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
6959 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
6960 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
6961 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
6962 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
6963 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
6964 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
6965 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
6966 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
6967 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
6968
6969 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
6970 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
6971 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
6972 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
6973 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
6974 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
6975 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
6976 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
6977 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
6978 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
6979 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
6980 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
6981 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
6982 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
6983 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
96038623
DE
6984
6985 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
6986 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
6987 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
6988 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
6989 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
6990 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
6991 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
6992 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 6993 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 6994};
2212663f 6995
95385cbb
AH
6996/* DST operations: void foo (void *, const int, const char). */
6997
6998static const struct builtin_description bdesc_dst[] =
6999{
7000 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
7001 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
7002 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
7003 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
7004
7005 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
7006 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
7007 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
7008 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
7009};
7010
2212663f 7011/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 7012
a3170dc6 7013static struct builtin_description bdesc_2arg[] =
0ac081f6 7014{
f18c054f
DB
7015 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
7016 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
7017 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
7018 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
7019 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
7020 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
7021 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
7022 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
7023 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
7024 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
7025 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 7026 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 7027 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
7028 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
7029 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
7030 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
7031 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
7032 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
7033 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
7034 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
7035 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
7036 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
7037 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
7038 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
7039 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
7040 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
7041 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
7042 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
7043 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
7044 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
7045 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
7046 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
7047 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
7048 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
7049 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
7050 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
7051 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
7052 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
7053 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
7054 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
7055 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
7056 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
7057 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
7058 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
7059 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
7060 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
7061 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
7062 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
7063 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
7064 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
7065 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
7066 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
7067 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
7068 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
7069 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
7070 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
7071 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
7072 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
7073 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
7074 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
7075 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
7076 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
7077 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
7078 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 7079 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 7080 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
7081 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
7082 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
7083 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 7084 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
7085 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
7086 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
7087 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
7088 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
7089 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
7090 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
7091 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
7092 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
7093 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
7094 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
7095 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
7096 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
7097 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
7098 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
7099 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
7100 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3e0de9d1
DP
7101 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
7102 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
7103 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
7104 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
7105 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
7106 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
7107 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
7108 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
7109 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
7110 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
7111 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
7112 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
7113 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
7114 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
7115 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
7116 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
7117 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
7118 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
7119 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
7120 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
7121 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
7122 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
7123 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
7124 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 7125 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 7126
58646b77
PB
7127 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
7128 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
7129 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
7130 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
7131 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
7132 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
7133 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
7134 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
7135 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
7136 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
7137 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
7138 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
7139 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
7140 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
7141 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
7142 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
7143 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
7144 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
7145 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
7146 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
7147 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
7148 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
7149 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
7150 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
7151 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7152 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7153 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7154 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7155 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7156 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7157 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7158 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7159 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7160 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7161 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7162 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7163 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7164 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7165 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7166 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7167 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7168 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7169 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7170 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7171 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7172 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7173 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7174 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7175 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7176 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7177 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7178 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7179 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7180 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7181 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7182 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7183 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7184 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7185 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7186 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7187 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7188 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7189 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7190 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7191 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7192 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7193 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7194 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7195 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7196 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7197 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7198 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7199 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7200 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7201 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7202 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7203 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7204 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7205 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7206 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7207 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7208 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7209 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7210 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7211 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7212 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7213 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7214 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7215 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7216 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7217 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7218 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7219 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7220 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7221 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7222 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7223 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7224 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7225 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7226 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7227 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7228 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7229 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7230 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7231 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7232 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7233 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7234 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7235 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7236 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7237 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7238 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7239 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7240 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7241 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7242 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7243 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7244 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7245 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7246 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7247 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7248 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7249 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7250 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7251 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7252 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7253 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7254
96038623
DE
7255 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7256 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7257 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7258 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7259 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7260 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7261 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7262 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7263 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7264 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7265
a3170dc6
AH
7266 /* Placeholder; leave as first SPE builtin. */
7267 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7268 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7269 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7270 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7271 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7272 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7273 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7274 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7275 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7276 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7277 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7278 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7279 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7280 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7281 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7282 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7283 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7284 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7285 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7286 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7287 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7288 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7289 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7290 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7291 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7292 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7293 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7294 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7295 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7296 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7297 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7298 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7299 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7300 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7301 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7302 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7303 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7304 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7305 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7306 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7307 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7308 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7309 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7310 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7311 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7312 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7313 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7314 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7315 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7316 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7317 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7318 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7319 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7320 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7321 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7322 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7323 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7324 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7325 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7326 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7327 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7328 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7329 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7330 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7331 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7332 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7333 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7334 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7335 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7336 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7337 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7338 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7339 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7340 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
7341 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7342 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
7343 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7344 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7345 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7346 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7347 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7348 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7349 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7350 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7351 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7352 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7353 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7354 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7355 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7356 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7357 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7358 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7359 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7360 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7361 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7362 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7363 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7364 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7365 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7366 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7367 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7368 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7369 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7370 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7371 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7372 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7373 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7374 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7375 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7376
7377 /* SPE binary operations expecting a 5-bit unsigned literal. */
7378 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7379
7380 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7381 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7382 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7383 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7384 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7385 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7386 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7387 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7388 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7389 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7390 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7391 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7392 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7393 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7394 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7395 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7396 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7397 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7398 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7399 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7400 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7401 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7402 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7403 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7404 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7405 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7406
7407 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7408 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
7409};
7410
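/* Editorial illustration, not part of the original rs6000.c: each entry in
   the table above simply binds a builtin name to the insn pattern that
   implements it; the expansion itself is done by
   rs6000_expand_binop_builtin below.  A sketch of a user-level call,
   assuming the SPE opaque vector type __ev64_opaque__ and invented
   variable names:

       __ev64_opaque__ a, b, c;
       c = __builtin_spe_evaddw (a, b);

   which ends up going through CODE_FOR_spe_evaddw.  */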
7411/* AltiVec predicates. */
7412
7413struct builtin_description_predicates
7414{
7415 const unsigned int mask;
7416 const enum insn_code icode;
7417 const char *opcode;
7418 const char *const name;
7419 const enum rs6000_builtins code;
7420};
7421
7422static const struct builtin_description_predicates bdesc_altivec_preds[] =
7423{
7424 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7425 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7426 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7427 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7428 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7429 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7430 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7431 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7432 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7433 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7434 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7435 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
7436 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7437
7438 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7439 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7440 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7441};
24408032 7442
a3170dc6
AH
7443/* SPE predicates. */
7444static struct builtin_description bdesc_spe_predicates[] =
7445{
7446 /* Place-holder. Leave as first. */
7447 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7448 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7449 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7450 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7451 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7452 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7453 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7454 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7455 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7456 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7457 /* Place-holder. Leave as last. */
7458 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7459};
7460
7461/* SPE evsel predicates. */
7462static struct builtin_description bdesc_spe_evsel[] =
7463{
7464 /* Place-holder. Leave as first. */
7465 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7466 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7467 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7468 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7469 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7470 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7471 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7472 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7473 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7474 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7475 /* Place-holder. Leave as last. */
7476 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7477};
7478
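/* Editorial illustration, not part of the original rs6000.c: the evsel
   entries above pair a comparison pattern with SPE's evsel selection, so
   these builtins take four operands -- two values to compare and two to
   choose from.  A hedged sketch with invented names:

       __ev64_opaque__ a, b, c, d, r;
       r = __builtin_spe_evsel_gts (a, b, c, d);

   Each element of R is taken from C or D according to the signed
   greater-than comparison of the corresponding elements of A and B.  */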
96038623
DE
7479/* PAIRED predicates. */
7480static const struct builtin_description bdesc_paired_preds[] =
7481{
7482 /* Place-holder. Leave as first. */
7483 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7484 /* Place-holder. Leave as last. */
7485 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7486};
7487
b6d08ca1 7488/* ABS* operations. */
100c4561
AH
7489
7490static const struct builtin_description bdesc_abs[] =
7491{
7492 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7493 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7494 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7495 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7496 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7497 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7498 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7499};
7500
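/* Editorial illustration, not part of the original rs6000.c: the abs
   entries above are expanded by altivec_expand_abs_builtin further down,
   which allocates two scratch registers for the pattern.  A sketch of a
   direct use, with invented variable names:

       __vector signed int x, y;
       y = __builtin_altivec_abs_v4si (x);

   This is roughly what altivec.h's vec_abs resolves to for
   vector signed int operands.  */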
617e0e1d
DB
7501/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7502 foo (VECa). */
24408032 7503
a3170dc6 7504static struct builtin_description bdesc_1arg[] =
2212663f 7505{
617e0e1d
DB
7506 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7507 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7508 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7509 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7510 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7511 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7512 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7513 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7514 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7515 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7516 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7517 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7518 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7519 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7520 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7521 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7522 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7523
58646b77
PB
7524 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7525 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7526 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7527 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7528 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7529 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7530 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7531 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7532 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7533 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7534 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7535 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7536 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7537 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7538 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7539 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7540 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7541 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7542 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7543
a3170dc6
AH
7544 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7545 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7546 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7547 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7548 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7549 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7550 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7551 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7552 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7553 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7554 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7555 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7556 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7557 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7558 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7559 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7560 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7561 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7562 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7563 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7564 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7565 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7566 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7567 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7568 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7569 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7570 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7571 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7572 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7573 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7574
7575 /* Place-holder. Leave as last unary SPE builtin. */
96038623
DE
7576 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7577
7578 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7579 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7580 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7581 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7582 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
2212663f
DB
7583};
7584
7585static rtx
5039610b 7586rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7587{
7588 rtx pat;
5039610b 7589 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7590 rtx op0 = expand_normal (arg0);
2212663f
DB
7591 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7592 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7593
0559cc77
DE
7594 if (icode == CODE_FOR_nothing)
7595 /* Builtin not supported on this processor. */
7596 return 0;
7597
20e26713
AH
7598 /* If we got invalid arguments, bail out before generating bad rtl. */
7599 if (arg0 == error_mark_node)
9a171fcd 7600 return const0_rtx;
20e26713 7601
0559cc77
DE
7602 if (icode == CODE_FOR_altivec_vspltisb
7603 || icode == CODE_FOR_altivec_vspltish
7604 || icode == CODE_FOR_altivec_vspltisw
7605 || icode == CODE_FOR_spe_evsplatfi
7606 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7607 {
7608 /* Only allow 5-bit *signed* literals. */
b44140e7 7609 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7610 || INTVAL (op0) > 15
7611 || INTVAL (op0) < -16)
b44140e7
AH
7612 {
7613 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7614 return const0_rtx;
b44140e7 7615 }
b44140e7
AH
7616 }
7617
c62f2db5 7618 if (target == 0
2212663f
DB
7619 || GET_MODE (target) != tmode
7620 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7621 target = gen_reg_rtx (tmode);
7622
7623 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7624 op0 = copy_to_mode_reg (mode0, op0);
7625
7626 pat = GEN_FCN (icode) (target, op0);
7627 if (! pat)
7628 return 0;
7629 emit_insn (pat);
0ac081f6 7630
2212663f
DB
7631 return target;
7632}
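/* Editorial illustration, not part of the original rs6000.c: the range
   check above restricts the splat-immediate builtins to literals in
   -16..15.  A sketch with invented variable names:

       __vector signed char v = __builtin_altivec_vspltisb (-7);

   compiles, while __builtin_altivec_vspltisb (99) is rejected with
   "argument 1 must be a 5-bit signed literal".  */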
ae4b4a02 7633
100c4561 7634static rtx
5039610b 7635altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7636{
7637 rtx pat, scratch1, scratch2;
5039610b 7638 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7639 rtx op0 = expand_normal (arg0);
100c4561
AH
7640 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7641 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7642
7643 /* If we have invalid arguments, bail out before generating bad rtl. */
7644 if (arg0 == error_mark_node)
9a171fcd 7645 return const0_rtx;
100c4561
AH
7646
7647 if (target == 0
7648 || GET_MODE (target) != tmode
7649 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7650 target = gen_reg_rtx (tmode);
7651
7652 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7653 op0 = copy_to_mode_reg (mode0, op0);
7654
7655 scratch1 = gen_reg_rtx (mode0);
7656 scratch2 = gen_reg_rtx (mode0);
7657
7658 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7659 if (! pat)
7660 return 0;
7661 emit_insn (pat);
7662
7663 return target;
7664}
7665
0ac081f6 7666static rtx
5039610b 7667rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7668{
7669 rtx pat;
5039610b
SL
7670 tree arg0 = CALL_EXPR_ARG (exp, 0);
7671 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7672 rtx op0 = expand_normal (arg0);
7673 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7674 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7675 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7676 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7677
0559cc77
DE
7678 if (icode == CODE_FOR_nothing)
7679 /* Builtin not supported on this processor. */
7680 return 0;
7681
20e26713
AH
7682 /* If we got invalid arguments, bail out before generating bad rtl. */
7683 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7684 return const0_rtx;
20e26713 7685
0559cc77
DE
7686 if (icode == CODE_FOR_altivec_vcfux
7687 || icode == CODE_FOR_altivec_vcfsx
7688 || icode == CODE_FOR_altivec_vctsxs
7689 || icode == CODE_FOR_altivec_vctuxs
7690 || icode == CODE_FOR_altivec_vspltb
7691 || icode == CODE_FOR_altivec_vsplth
7692 || icode == CODE_FOR_altivec_vspltw
7693 || icode == CODE_FOR_spe_evaddiw
7694 || icode == CODE_FOR_spe_evldd
7695 || icode == CODE_FOR_spe_evldh
7696 || icode == CODE_FOR_spe_evldw
7697 || icode == CODE_FOR_spe_evlhhesplat
7698 || icode == CODE_FOR_spe_evlhhossplat
7699 || icode == CODE_FOR_spe_evlhhousplat
7700 || icode == CODE_FOR_spe_evlwhe
7701 || icode == CODE_FOR_spe_evlwhos
7702 || icode == CODE_FOR_spe_evlwhou
7703 || icode == CODE_FOR_spe_evlwhsplat
7704 || icode == CODE_FOR_spe_evlwwsplat
7705 || icode == CODE_FOR_spe_evrlwi
7706 || icode == CODE_FOR_spe_evslwi
7707 || icode == CODE_FOR_spe_evsrwis
f5119d10 7708 || icode == CODE_FOR_spe_evsubifw
0559cc77 7709 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7710 {
7711 /* Only allow 5-bit unsigned literals. */
8bb418a3 7712 STRIP_NOPS (arg1);
b44140e7
AH
7713 if (TREE_CODE (arg1) != INTEGER_CST
7714 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7715 {
7716 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7717 return const0_rtx;
b44140e7 7718 }
b44140e7
AH
7719 }
7720
c62f2db5 7721 if (target == 0
0ac081f6
AH
7722 || GET_MODE (target) != tmode
7723 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7724 target = gen_reg_rtx (tmode);
7725
7726 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7727 op0 = copy_to_mode_reg (mode0, op0);
7728 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7729 op1 = copy_to_mode_reg (mode1, op1);
7730
7731 pat = GEN_FCN (icode) (target, op0, op1);
7732 if (! pat)
7733 return 0;
7734 emit_insn (pat);
7735
7736 return target;
7737}
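/* Editorial illustration, not part of the original rs6000.c: for the
   icodes listed in the check above, the second argument must be a literal
   that fits in five unsigned bits.  A sketch with invented names:

       __vector signed int v, w;
       int i;
       w = __builtin_altivec_vspltw (v, 3);

   compiles, while __builtin_altivec_vspltw (v, i) is rejected with
   "argument 2 must be a 5-bit unsigned literal".  */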
6525c0e7 7738
ae4b4a02 7739static rtx
f676971a 7740altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7741 tree exp, rtx target)
ae4b4a02
AH
7742{
7743 rtx pat, scratch;
5039610b
SL
7744 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7745 tree arg0 = CALL_EXPR_ARG (exp, 1);
7746 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7747 rtx op0 = expand_normal (arg0);
7748 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
7749 enum machine_mode tmode = SImode;
7750 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7751 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7752 int cr6_form_int;
7753
7754 if (TREE_CODE (cr6_form) != INTEGER_CST)
7755 {
7756 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 7757 return const0_rtx;
ae4b4a02
AH
7758 }
7759 else
7760 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
7761
37409796 7762 gcc_assert (mode0 == mode1);
ae4b4a02
AH
7763
7764 /* If we have invalid arguments, bail out before generating bad rtl. */
7765 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7766 return const0_rtx;
ae4b4a02
AH
7767
7768 if (target == 0
7769 || GET_MODE (target) != tmode
7770 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7771 target = gen_reg_rtx (tmode);
7772
7773 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7774 op0 = copy_to_mode_reg (mode0, op0);
7775 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7776 op1 = copy_to_mode_reg (mode1, op1);
7777
7778 scratch = gen_reg_rtx (mode0);
7779
7780 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 7781 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
7782 if (! pat)
7783 return 0;
7784 emit_insn (pat);
7785
7786 /* The vec_any* and vec_all* predicates use the same opcodes for two
7787 different operations, but the bits in CR6 will be different
7788 depending on what information we want. So we have to play tricks
7789 with CR6 to get the right bits out.
7790
7791 If you think this is disgusting, look at the specs for the
7792 AltiVec predicates. */
7793
c4ad648e
AM
7794 switch (cr6_form_int)
7795 {
7796 case 0:
7797 emit_insn (gen_cr6_test_for_zero (target));
7798 break;
7799 case 1:
7800 emit_insn (gen_cr6_test_for_zero_reverse (target));
7801 break;
7802 case 2:
7803 emit_insn (gen_cr6_test_for_lt (target));
7804 break;
7805 case 3:
7806 emit_insn (gen_cr6_test_for_lt_reverse (target));
7807 break;
7808 default:
7809 error ("argument 1 of __builtin_altivec_predicate is out of range");
7810 break;
7811 }
ae4b4a02
AH
7812
7813 return target;
7814}
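/* Editorial illustration, not part of the original rs6000.c: the CR6
   selector described above is the leading argument of the predicate
   builtins.  A sketch assuming the __CR6_* encodings from altivec.h and
   invented variable names:

       __vector signed int a, b;
       int all_eq = __builtin_altivec_vcmpequw_p (2, a, b);
       int any_eq = __builtin_altivec_vcmpequw_p (1, a, b);

   Selector 2 tests the CR6 "lt" bit (all elements compared true) and
   selector 1 tests the reversed "eq" bit (at least one element true), so
   the same vcmpequw. pattern serves both vec_all_eq and vec_any_eq.  */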
7815
96038623
DE
7816static rtx
7817paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
7818{
7819 rtx pat, addr;
7820 tree arg0 = CALL_EXPR_ARG (exp, 0);
7821 tree arg1 = CALL_EXPR_ARG (exp, 1);
7822 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7823 enum machine_mode mode0 = Pmode;
7824 enum machine_mode mode1 = Pmode;
7825 rtx op0 = expand_normal (arg0);
7826 rtx op1 = expand_normal (arg1);
7827
7828 if (icode == CODE_FOR_nothing)
7829 /* Builtin not supported on this processor. */
7830 return 0;
7831
7832 /* If we got invalid arguments, bail out before generating bad rtl. */
7833 if (arg0 == error_mark_node || arg1 == error_mark_node)
7834 return const0_rtx;
7835
7836 if (target == 0
7837 || GET_MODE (target) != tmode
7838 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7839 target = gen_reg_rtx (tmode);
7840
7841 op1 = copy_to_mode_reg (mode1, op1);
7842
7843 if (op0 == const0_rtx)
7844 {
7845 addr = gen_rtx_MEM (tmode, op1);
7846 }
7847 else
7848 {
7849 op0 = copy_to_mode_reg (mode0, op0);
7850 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7851 }
7852
7853 pat = GEN_FCN (icode) (target, addr);
7854
7855 if (! pat)
7856 return 0;
7857 emit_insn (pat);
7858
7859 return target;
7860}
7861
b4a62fa0 7862static rtx
5039610b 7863altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
b4a62fa0
SB
7864{
7865 rtx pat, addr;
5039610b
SL
7866 tree arg0 = CALL_EXPR_ARG (exp, 0);
7867 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
7868 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7869 enum machine_mode mode0 = Pmode;
7870 enum machine_mode mode1 = Pmode;
84217346
MD
7871 rtx op0 = expand_normal (arg0);
7872 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
7873
7874 if (icode == CODE_FOR_nothing)
7875 /* Builtin not supported on this processor. */
7876 return 0;
7877
7878 /* If we got invalid arguments, bail out before generating bad rtl. */
7879 if (arg0 == error_mark_node || arg1 == error_mark_node)
7880 return const0_rtx;
7881
7882 if (target == 0
7883 || GET_MODE (target) != tmode
7884 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7885 target = gen_reg_rtx (tmode);
7886
f676971a 7887 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
7888
7889 if (op0 == const0_rtx)
7890 {
7891 addr = gen_rtx_MEM (tmode, op1);
7892 }
7893 else
7894 {
7895 op0 = copy_to_mode_reg (mode0, op0);
7896 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7897 }
7898
7899 pat = GEN_FCN (icode) (target, addr);
7900
7901 if (! pat)
7902 return 0;
7903 emit_insn (pat);
7904
7905 return target;
7906}
7907
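/* Editorial illustration, not part of the original rs6000.c: in the lv
   expanders above, a zero first operand yields a plain (mem (reg))
   address, while a nonzero one yields (mem (plus offset base)).  A sketch
   assuming altivec.h maps vec_ld to __builtin_altivec_lvx:

       float buf[32] __attribute__ ((aligned (16)));
       __vector float v = vec_ld (0, buf);

   which reaches the ALTIVEC_BUILTIN_LVX case of altivec_expand_builtin
   below.  */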
61bea3b0 7908static rtx
5039610b 7909spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 7910{
5039610b
SL
7911 tree arg0 = CALL_EXPR_ARG (exp, 0);
7912 tree arg1 = CALL_EXPR_ARG (exp, 1);
7913 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7914 rtx op0 = expand_normal (arg0);
7915 rtx op1 = expand_normal (arg1);
7916 rtx op2 = expand_normal (arg2);
61bea3b0
AH
7917 rtx pat;
7918 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
7919 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
7920 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
7921
7922 /* Invalid arguments. Bail before doing anything stupid! */
7923 if (arg0 == error_mark_node
7924 || arg1 == error_mark_node
7925 || arg2 == error_mark_node)
7926 return const0_rtx;
7927
7928 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
7929 op0 = copy_to_mode_reg (mode2, op0);
7930 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7931 op1 = copy_to_mode_reg (mode0, op1);
7932 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7933 op2 = copy_to_mode_reg (mode1, op2);
7934
7935 pat = GEN_FCN (icode) (op1, op2, op0);
7936 if (pat)
7937 emit_insn (pat);
7938 return NULL_RTX;
7939}
7940
96038623
DE
7941static rtx
7942paired_expand_stv_builtin (enum insn_code icode, tree exp)
7943{
7944 tree arg0 = CALL_EXPR_ARG (exp, 0);
7945 tree arg1 = CALL_EXPR_ARG (exp, 1);
7946 tree arg2 = CALL_EXPR_ARG (exp, 2);
7947 rtx op0 = expand_normal (arg0);
7948 rtx op1 = expand_normal (arg1);
7949 rtx op2 = expand_normal (arg2);
7950 rtx pat, addr;
7951 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7952 enum machine_mode mode1 = Pmode;
7953 enum machine_mode mode2 = Pmode;
7954
7955 /* Invalid arguments. Bail before doing anything stupid! */
7956 if (arg0 == error_mark_node
7957 || arg1 == error_mark_node
7958 || arg2 == error_mark_node)
7959 return const0_rtx;
7960
7961 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7962 op0 = copy_to_mode_reg (tmode, op0);
7963
7964 op2 = copy_to_mode_reg (mode2, op2);
7965
7966 if (op1 == const0_rtx)
7967 {
7968 addr = gen_rtx_MEM (tmode, op2);
7969 }
7970 else
7971 {
7972 op1 = copy_to_mode_reg (mode1, op1);
7973 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7974 }
7975
7976 pat = GEN_FCN (icode) (addr, op0);
7977 if (pat)
7978 emit_insn (pat);
7979 return NULL_RTX;
7980}
7981
6525c0e7 7982static rtx
5039610b 7983altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 7984{
5039610b
SL
7985 tree arg0 = CALL_EXPR_ARG (exp, 0);
7986 tree arg1 = CALL_EXPR_ARG (exp, 1);
7987 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7988 rtx op0 = expand_normal (arg0);
7989 rtx op1 = expand_normal (arg1);
7990 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
7991 rtx pat, addr;
7992 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7993 enum machine_mode mode1 = Pmode;
7994 enum machine_mode mode2 = Pmode;
6525c0e7
AH
7995
7996 /* Invalid arguments. Bail before doing anything stupid! */
7997 if (arg0 == error_mark_node
7998 || arg1 == error_mark_node
7999 || arg2 == error_mark_node)
9a171fcd 8000 return const0_rtx;
6525c0e7 8001
b4a62fa0
SB
8002 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8003 op0 = copy_to_mode_reg (tmode, op0);
8004
f676971a 8005 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
8006
8007 if (op1 == const0_rtx)
8008 {
8009 addr = gen_rtx_MEM (tmode, op2);
8010 }
8011 else
8012 {
8013 op1 = copy_to_mode_reg (mode1, op1);
8014 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8015 }
6525c0e7 8016
b4a62fa0 8017 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
8018 if (pat)
8019 emit_insn (pat);
8020 return NULL_RTX;
8021}
8022
2212663f 8023static rtx
5039610b 8024rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
8025{
8026 rtx pat;
5039610b
SL
8027 tree arg0 = CALL_EXPR_ARG (exp, 0);
8028 tree arg1 = CALL_EXPR_ARG (exp, 1);
8029 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8030 rtx op0 = expand_normal (arg0);
8031 rtx op1 = expand_normal (arg1);
8032 rtx op2 = expand_normal (arg2);
2212663f
DB
8033 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8034 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8035 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8036 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 8037
774b5662
DE
8038 if (icode == CODE_FOR_nothing)
8039 /* Builtin not supported on this processor. */
8040 return 0;
8041
20e26713
AH
8042 /* If we got invalid arguments, bail out before generating bad rtl. */
8043 if (arg0 == error_mark_node
8044 || arg1 == error_mark_node
8045 || arg2 == error_mark_node)
9a171fcd 8046 return const0_rtx;
20e26713 8047
aba5fb01
NS
8048 if (icode == CODE_FOR_altivec_vsldoi_v4sf
8049 || icode == CODE_FOR_altivec_vsldoi_v4si
8050 || icode == CODE_FOR_altivec_vsldoi_v8hi
8051 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
8052 {
8053 /* Only allow 4-bit unsigned literals. */
8bb418a3 8054 STRIP_NOPS (arg2);
b44140e7
AH
8055 if (TREE_CODE (arg2) != INTEGER_CST
8056 || TREE_INT_CST_LOW (arg2) & ~0xf)
8057 {
8058 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 8059 return const0_rtx;
b44140e7 8060 }
b44140e7
AH
8061 }
8062
c62f2db5 8063 if (target == 0
2212663f
DB
8064 || GET_MODE (target) != tmode
8065 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8066 target = gen_reg_rtx (tmode);
8067
8068 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8069 op0 = copy_to_mode_reg (mode0, op0);
8070 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8071 op1 = copy_to_mode_reg (mode1, op1);
8072 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
8073 op2 = copy_to_mode_reg (mode2, op2);
8074
49e39588
RE
8075 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
8076 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
8077 else
8078 pat = GEN_FCN (icode) (target, op0, op1, op2);
2212663f
DB
8079 if (! pat)
8080 return 0;
8081 emit_insn (pat);
8082
8083 return target;
8084}
92898235 8085
3a9b8c7e 8086/* Expand the lvx builtins. */
0ac081f6 8087static rtx
a2369ed3 8088altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 8089{
5039610b 8090 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 8091 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
8092 tree arg0;
8093 enum machine_mode tmode, mode0;
7c3abc73 8094 rtx pat, op0;
3a9b8c7e 8095 enum insn_code icode;
92898235 8096
0ac081f6
AH
8097 switch (fcode)
8098 {
f18c054f 8099 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 8100 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 8101 break;
f18c054f 8102 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 8103 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
8104 break;
8105 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 8106 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
8107 break;
8108 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 8109 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
8110 break;
8111 default:
8112 *expandedp = false;
8113 return NULL_RTX;
8114 }
0ac081f6 8115
3a9b8c7e 8116 *expandedp = true;
f18c054f 8117
5039610b 8118 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8119 op0 = expand_normal (arg0);
3a9b8c7e
AH
8120 tmode = insn_data[icode].operand[0].mode;
8121 mode0 = insn_data[icode].operand[1].mode;
f18c054f 8122
3a9b8c7e
AH
8123 if (target == 0
8124 || GET_MODE (target) != tmode
8125 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8126 target = gen_reg_rtx (tmode);
24408032 8127
3a9b8c7e
AH
8128 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8129 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 8130
3a9b8c7e
AH
8131 pat = GEN_FCN (icode) (target, op0);
8132 if (! pat)
8133 return 0;
8134 emit_insn (pat);
8135 return target;
8136}
f18c054f 8137
3a9b8c7e
AH
8138/* Expand the stvx builtins. */
8139static rtx
f676971a 8140altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8141 bool *expandedp)
3a9b8c7e 8142{
5039610b 8143 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8144 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8145 tree arg0, arg1;
8146 enum machine_mode mode0, mode1;
7c3abc73 8147 rtx pat, op0, op1;
3a9b8c7e 8148 enum insn_code icode;
f18c054f 8149
3a9b8c7e
AH
8150 switch (fcode)
8151 {
8152 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 8153 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
8154 break;
8155 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 8156 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
8157 break;
8158 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 8159 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
8160 break;
8161 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 8162 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
8163 break;
8164 default:
8165 *expandedp = false;
8166 return NULL_RTX;
8167 }
24408032 8168
5039610b
SL
8169 arg0 = CALL_EXPR_ARG (exp, 0);
8170 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
8171 op0 = expand_normal (arg0);
8172 op1 = expand_normal (arg1);
3a9b8c7e
AH
8173 mode0 = insn_data[icode].operand[0].mode;
8174 mode1 = insn_data[icode].operand[1].mode;
f18c054f 8175
3a9b8c7e
AH
8176 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8177 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8178 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8179 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 8180
3a9b8c7e
AH
8181 pat = GEN_FCN (icode) (op0, op1);
8182 if (pat)
8183 emit_insn (pat);
f18c054f 8184
3a9b8c7e
AH
8185 *expandedp = true;
8186 return NULL_RTX;
8187}
f18c054f 8188
3a9b8c7e
AH
8189/* Expand the dst builtins. */
8190static rtx
f676971a 8191altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8192 bool *expandedp)
3a9b8c7e 8193{
5039610b 8194 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8195 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8196 tree arg0, arg1, arg2;
8197 enum machine_mode mode0, mode1, mode2;
7c3abc73 8198 rtx pat, op0, op1, op2;
586de218 8199 const struct builtin_description *d;
a3170dc6 8200 size_t i;
f18c054f 8201
3a9b8c7e 8202 *expandedp = false;
f18c054f 8203
3a9b8c7e 8204 /* Handle DST variants. */
586de218 8205 d = bdesc_dst;
3a9b8c7e
AH
8206 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8207 if (d->code == fcode)
8208 {
5039610b
SL
8209 arg0 = CALL_EXPR_ARG (exp, 0);
8210 arg1 = CALL_EXPR_ARG (exp, 1);
8211 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8212 op0 = expand_normal (arg0);
8213 op1 = expand_normal (arg1);
8214 op2 = expand_normal (arg2);
3a9b8c7e
AH
8215 mode0 = insn_data[d->icode].operand[0].mode;
8216 mode1 = insn_data[d->icode].operand[1].mode;
8217 mode2 = insn_data[d->icode].operand[2].mode;
24408032 8218
3a9b8c7e
AH
8219 /* Invalid arguments, bail out before generating bad rtl. */
8220 if (arg0 == error_mark_node
8221 || arg1 == error_mark_node
8222 || arg2 == error_mark_node)
8223 return const0_rtx;
f18c054f 8224
86e7df90 8225 *expandedp = true;
8bb418a3 8226 STRIP_NOPS (arg2);
3a9b8c7e
AH
8227 if (TREE_CODE (arg2) != INTEGER_CST
8228 || TREE_INT_CST_LOW (arg2) & ~0x3)
8229 {
9e637a26 8230 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
8231 return const0_rtx;
8232 }
f18c054f 8233
3a9b8c7e 8234 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 8235 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
8236 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8237 op1 = copy_to_mode_reg (mode1, op1);
24408032 8238
3a9b8c7e
AH
8239 pat = GEN_FCN (d->icode) (op0, op1, op2);
8240 if (pat != 0)
8241 emit_insn (pat);
f18c054f 8242
3a9b8c7e
AH
8243 return NULL_RTX;
8244 }
f18c054f 8245
3a9b8c7e
AH
8246 return NULL_RTX;
8247}
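/* Editorial illustration, not part of the original rs6000.c: the DST
   variants handled above require their last operand to be a literal tag
   in 0..3.  A sketch assuming altivec.h maps vec_dst to
   __builtin_altivec_dst, with invented names:

       vec_dst (p, ctl, 0);

   compiles, while passing a non-constant or out-of-range tag is rejected
   with the "must be a 2-bit unsigned literal" error above.  */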
24408032 8248
7a4eca66
DE
8249/* Expand vec_init builtin. */
8250static rtx
5039610b 8251altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
8252{
8253 enum machine_mode tmode = TYPE_MODE (type);
8254 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8255 int i, n_elt = GET_MODE_NUNITS (tmode);
8256 rtvec v = rtvec_alloc (n_elt);
8257
8258 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 8259 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8260
5039610b 8261 for (i = 0; i < n_elt; ++i)
7a4eca66 8262 {
5039610b 8263 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
8264 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8265 }
8266
7a4eca66
DE
8267 if (!target || !register_operand (target, tmode))
8268 target = gen_reg_rtx (tmode);
8269
8270 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8271 return target;
8272}
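/* Editorial illustration, not part of the original rs6000.c: the vec_init
   expander above requires exactly one argument per vector element, so the
   V4SI flavor takes four scalars.  A sketch, assuming the
   __builtin_vec_init_v4si spelling these codes are registered under:

       __vector signed int v = __builtin_vec_init_v4si (1, 2, 3, 4);  */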
8273
8274/* Return the integer constant in ARG. Constrain it to be in the range
8275 of the subparts of VEC_TYPE; issue an error if not. */
8276
8277static int
8278get_element_number (tree vec_type, tree arg)
8279{
8280 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8281
8282 if (!host_integerp (arg, 1)
8283 || (elt = tree_low_cst (arg, 1), elt > max))
8284 {
8285 error ("selector must be an integer constant in the range 0..%wi", max);
8286 return 0;
8287 }
8288
8289 return elt;
8290}
8291
8292/* Expand vec_set builtin. */
8293static rtx
5039610b 8294altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
8295{
8296 enum machine_mode tmode, mode1;
8297 tree arg0, arg1, arg2;
8298 int elt;
8299 rtx op0, op1;
8300
5039610b
SL
8301 arg0 = CALL_EXPR_ARG (exp, 0);
8302 arg1 = CALL_EXPR_ARG (exp, 1);
8303 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
8304
8305 tmode = TYPE_MODE (TREE_TYPE (arg0));
8306 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8307 gcc_assert (VECTOR_MODE_P (tmode));
8308
8309 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
8310 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
8311 elt = get_element_number (TREE_TYPE (arg0), arg2);
8312
8313 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8314 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8315
8316 op0 = force_reg (tmode, op0);
8317 op1 = force_reg (mode1, op1);
8318
8319 rs6000_expand_vector_set (op0, op1, elt);
8320
8321 return op0;
8322}
8323
8324/* Expand vec_ext builtin. */
8325static rtx
5039610b 8326altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
8327{
8328 enum machine_mode tmode, mode0;
8329 tree arg0, arg1;
8330 int elt;
8331 rtx op0;
8332
5039610b
SL
8333 arg0 = CALL_EXPR_ARG (exp, 0);
8334 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8335
84217346 8336 op0 = expand_normal (arg0);
7a4eca66
DE
8337 elt = get_element_number (TREE_TYPE (arg0), arg1);
8338
8339 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8340 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8341 gcc_assert (VECTOR_MODE_P (mode0));
8342
8343 op0 = force_reg (mode0, op0);
8344
8345 if (optimize || !target || !register_operand (target, tmode))
8346 target = gen_reg_rtx (tmode);
8347
8348 rs6000_expand_vector_extract (target, op0, elt);
8349
8350 return target;
8351}
8352
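/* Editorial illustration, not part of the original rs6000.c: the element
   selector used by the vec_set and vec_ext expanders goes through
   get_element_number above, so it must be a constant in 0..nunits-1.
   A sketch, assuming the __builtin_vec_ext_v4si spelling:

       __vector signed int v;
       int x = __builtin_vec_ext_v4si (v, 2);

   while a selector of 7 would be rejected with "selector must be an
   integer constant in the range 0..3".  */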
3a9b8c7e
AH
8353/* Expand the builtin in EXP and store the result in TARGET. Store
8354 true in *EXPANDEDP if we found a builtin to expand. */
8355static rtx
a2369ed3 8356altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8357{
586de218
KG
8358 const struct builtin_description *d;
8359 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
8360 size_t i;
8361 enum insn_code icode;
5039610b 8362 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
8363 tree arg0;
8364 rtx op0, pat;
8365 enum machine_mode tmode, mode0;
3a9b8c7e 8366 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8367
58646b77
PB
8368 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8369 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8370 {
8371 *expandedp = true;
ea40ba9c 8372 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
8373 return const0_rtx;
8374 }
8375
3a9b8c7e
AH
8376 target = altivec_expand_ld_builtin (exp, target, expandedp);
8377 if (*expandedp)
8378 return target;
0ac081f6 8379
3a9b8c7e
AH
8380 target = altivec_expand_st_builtin (exp, target, expandedp);
8381 if (*expandedp)
8382 return target;
8383
8384 target = altivec_expand_dst_builtin (exp, target, expandedp);
8385 if (*expandedp)
8386 return target;
8387
8388 *expandedp = true;
95385cbb 8389
3a9b8c7e
AH
8390 switch (fcode)
8391 {
6525c0e7 8392 case ALTIVEC_BUILTIN_STVX:
5039610b 8393 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8394 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8395 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8396 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8397 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8398 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8399 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8400 case ALTIVEC_BUILTIN_STVXL:
5039610b 8401 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8402
95385cbb
AH
8403 case ALTIVEC_BUILTIN_MFVSCR:
8404 icode = CODE_FOR_altivec_mfvscr;
8405 tmode = insn_data[icode].operand[0].mode;
8406
8407 if (target == 0
8408 || GET_MODE (target) != tmode
8409 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8410 target = gen_reg_rtx (tmode);
f676971a 8411
95385cbb 8412 pat = GEN_FCN (icode) (target);
0ac081f6
AH
8413 if (! pat)
8414 return 0;
8415 emit_insn (pat);
95385cbb
AH
8416 return target;
8417
8418 case ALTIVEC_BUILTIN_MTVSCR:
8419 icode = CODE_FOR_altivec_mtvscr;
5039610b 8420 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8421 op0 = expand_normal (arg0);
95385cbb
AH
8422 mode0 = insn_data[icode].operand[0].mode;
8423
8424 /* If we got invalid arguments, bail out before generating bad rtl. */
8425 if (arg0 == error_mark_node)
9a171fcd 8426 return const0_rtx;
95385cbb
AH
8427
8428 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8429 op0 = copy_to_mode_reg (mode0, op0);
8430
8431 pat = GEN_FCN (icode) (op0);
8432 if (pat)
8433 emit_insn (pat);
8434 return NULL_RTX;
3a9b8c7e 8435
95385cbb
AH
8436 case ALTIVEC_BUILTIN_DSSALL:
8437 emit_insn (gen_altivec_dssall ());
8438 return NULL_RTX;
8439
8440 case ALTIVEC_BUILTIN_DSS:
8441 icode = CODE_FOR_altivec_dss;
5039610b 8442 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8443 STRIP_NOPS (arg0);
84217346 8444 op0 = expand_normal (arg0);
95385cbb
AH
8445 mode0 = insn_data[icode].operand[0].mode;
8446
8447 /* If we got invalid arguments, bail out before generating bad rtl. */
8448 if (arg0 == error_mark_node)
9a171fcd 8449 return const0_rtx;
95385cbb 8450
b44140e7
AH
8451 if (TREE_CODE (arg0) != INTEGER_CST
8452 || TREE_INT_CST_LOW (arg0) & ~0x3)
8453 {
8454 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8455 return const0_rtx;
b44140e7
AH
8456 }
8457
95385cbb
AH
8458 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8459 op0 = copy_to_mode_reg (mode0, op0);
8460
8461 emit_insn (gen_altivec_dss (op0));
0ac081f6 8462 return NULL_RTX;
7a4eca66
DE
8463
8464 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8465 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8466 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8467 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8468 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8469
8470 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8471 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8472 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8473 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8474 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8475
8476 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8477 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8478 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8479 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8480 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8481
8482 default:
8483 break;
8484 /* Fall through. */
0ac081f6 8485 }
24408032 8486
100c4561 8487 /* Expand abs* operations. */
586de218 8488 d = bdesc_abs;
ca7558fc 8489 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8490 if (d->code == fcode)
5039610b 8491 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8492
ae4b4a02 8493 /* Expand the AltiVec predicates. */
586de218 8494 dp = bdesc_altivec_preds;
ca7558fc 8495 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8496 if (dp->code == fcode)
c4ad648e 8497 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8498 exp, target);
ae4b4a02 8499
6525c0e7
AH
8500 /* LV* are funky. We initialized them differently. */
8501 switch (fcode)
8502 {
8503 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8504 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5039610b 8505 exp, target);
6525c0e7 8506 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8507 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5039610b 8508 exp, target);
6525c0e7 8509 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8510 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5039610b 8511 exp, target);
6525c0e7 8512 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8513 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5039610b 8514 exp, target);
6525c0e7 8515 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8516 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5039610b 8517 exp, target);
6525c0e7 8518 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8519 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5039610b 8520 exp, target);
6525c0e7 8521 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8522 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5039610b 8523 exp, target);
6525c0e7
AH
8524 default:
8525 break;
8526 /* Fall through. */
8527 }
95385cbb 8528
92898235 8529 *expandedp = false;
0ac081f6
AH
8530 return NULL_RTX;
8531}
8532
96038623
DE
8533/* Expand the builtin in EXP and store the result in TARGET. Store
8534 true in *EXPANDEDP if we found a builtin to expand. */
8535static rtx
8536paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8537{
8538 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8539 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 8540 const struct builtin_description *d;
96038623
DE
8541 size_t i;
8542
8543 *expandedp = true;
8544
8545 switch (fcode)
8546 {
8547 case PAIRED_BUILTIN_STX:
8548 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8549 case PAIRED_BUILTIN_LX:
8550 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8551 default:
8552 break;
8553 /* Fall through. */
8554 }
8555
8556 /* Expand the paired predicates. */
23a651fc 8557 d = bdesc_paired_preds;
96038623
DE
8558 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8559 if (d->code == fcode)
8560 return paired_expand_predicate_builtin (d->icode, exp, target);
8561
8562 *expandedp = false;
8563 return NULL_RTX;
8564}
8565
a3170dc6
AH
8566/* Binops that need to be initialized manually, but can be expanded
8567 automagically by rs6000_expand_binop_builtin. */
8568static struct builtin_description bdesc_2arg_spe[] =
8569{
8570 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8571 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8572 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8573 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8574 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8575 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8576 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8577 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8578 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8579 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8580 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8581 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8582 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8583 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8584 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8585 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8586 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8587 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8588 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8589 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8590 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8591 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8592};
8593
8594/* Expand the builtin in EXP and store the result in TARGET. Store
8595 true in *EXPANDEDP if we found a builtin to expand.
8596
8597 This expands the SPE builtins that are not simple unary and binary
8598 operations. */
8599static rtx
a2369ed3 8600spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8601{
5039610b 8602 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8603 tree arg1, arg0;
8604 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8605 enum insn_code icode;
8606 enum machine_mode tmode, mode0;
8607 rtx pat, op0;
8608 struct builtin_description *d;
8609 size_t i;
8610
8611 *expandedp = true;
8612
8613 /* Syntax check for a 5-bit unsigned immediate. */
8614 switch (fcode)
8615 {
8616 case SPE_BUILTIN_EVSTDD:
8617 case SPE_BUILTIN_EVSTDH:
8618 case SPE_BUILTIN_EVSTDW:
8619 case SPE_BUILTIN_EVSTWHE:
8620 case SPE_BUILTIN_EVSTWHO:
8621 case SPE_BUILTIN_EVSTWWE:
8622 case SPE_BUILTIN_EVSTWWO:
5039610b 8623 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8624 if (TREE_CODE (arg1) != INTEGER_CST
8625 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8626 {
8627 error ("argument 2 must be a 5-bit unsigned literal");
8628 return const0_rtx;
8629 }
8630 break;
8631 default:
8632 break;
8633 }
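/* Illustrative sketch, not part of the original source: the check above
   requires the offset operand of these store builtins to be an integer
   literal in the range 0..31.  Using one of the builtins registered in
   spe_init_builtins below (argument types assumed from its prototype):

       __ev64_opaque__ v;
       __builtin_spe_evstdd (v, p, 31);    literal fits in 5 bits, accepted
       __builtin_spe_evstdd (v, p, 32);    out of range, rejected with the
                                           error reported above.  */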
8634
00332c9f
AH
8635 /* The evsplat*i instructions are not quite generic. */
8636 switch (fcode)
8637 {
8638 case SPE_BUILTIN_EVSPLATFI:
8639 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8640 exp, target);
00332c9f
AH
8641 case SPE_BUILTIN_EVSPLATI:
8642 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8643 exp, target);
00332c9f
AH
8644 default:
8645 break;
8646 }
8647
a3170dc6
AH
8648 d = (struct builtin_description *) bdesc_2arg_spe;
8649 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8650 if (d->code == fcode)
5039610b 8651 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8652
8653 d = (struct builtin_description *) bdesc_spe_predicates;
8654 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8655 if (d->code == fcode)
5039610b 8656 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8657
8658 d = (struct builtin_description *) bdesc_spe_evsel;
8659 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8660 if (d->code == fcode)
5039610b 8661 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8662
8663 switch (fcode)
8664 {
8665 case SPE_BUILTIN_EVSTDDX:
5039610b 8666 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8667 case SPE_BUILTIN_EVSTDHX:
5039610b 8668 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8669 case SPE_BUILTIN_EVSTDWX:
5039610b 8670 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8671 case SPE_BUILTIN_EVSTWHEX:
5039610b 8672 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8673 case SPE_BUILTIN_EVSTWHOX:
5039610b 8674 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8675 case SPE_BUILTIN_EVSTWWEX:
5039610b 8676 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8677 case SPE_BUILTIN_EVSTWWOX:
5039610b 8678 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8679 case SPE_BUILTIN_EVSTDD:
5039610b 8680 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8681 case SPE_BUILTIN_EVSTDH:
5039610b 8682 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8683 case SPE_BUILTIN_EVSTDW:
5039610b 8684 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8685 case SPE_BUILTIN_EVSTWHE:
5039610b 8686 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8687 case SPE_BUILTIN_EVSTWHO:
5039610b 8688 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8689 case SPE_BUILTIN_EVSTWWE:
5039610b 8690 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8691 case SPE_BUILTIN_EVSTWWO:
5039610b 8692 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8693 case SPE_BUILTIN_MFSPEFSCR:
8694 icode = CODE_FOR_spe_mfspefscr;
8695 tmode = insn_data[icode].operand[0].mode;
8696
8697 if (target == 0
8698 || GET_MODE (target) != tmode
8699 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8700 target = gen_reg_rtx (tmode);
f676971a 8701
a3170dc6
AH
8702 pat = GEN_FCN (icode) (target);
8703 if (! pat)
8704 return 0;
8705 emit_insn (pat);
8706 return target;
8707 case SPE_BUILTIN_MTSPEFSCR:
8708 icode = CODE_FOR_spe_mtspefscr;
5039610b 8709 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8710 op0 = expand_normal (arg0);
a3170dc6
AH
8711 mode0 = insn_data[icode].operand[0].mode;
8712
8713 if (arg0 == error_mark_node)
8714 return const0_rtx;
8715
8716 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8717 op0 = copy_to_mode_reg (mode0, op0);
8718
8719 pat = GEN_FCN (icode) (op0);
8720 if (pat)
8721 emit_insn (pat);
8722 return NULL_RTX;
8723 default:
8724 break;
8725 }
8726
8727 *expandedp = false;
8728 return NULL_RTX;
8729}
8730
96038623
DE
8731static rtx
8732paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
8733{
8734 rtx pat, scratch, tmp;
8735 tree form = CALL_EXPR_ARG (exp, 0);
8736 tree arg0 = CALL_EXPR_ARG (exp, 1);
8737 tree arg1 = CALL_EXPR_ARG (exp, 2);
8738 rtx op0 = expand_normal (arg0);
8739 rtx op1 = expand_normal (arg1);
8740 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8741 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8742 int form_int;
8743 enum rtx_code code;
8744
8745 if (TREE_CODE (form) != INTEGER_CST)
8746 {
8747 error ("argument 1 of __builtin_paired_predicate must be a constant");
8748 return const0_rtx;
8749 }
8750 else
8751 form_int = TREE_INT_CST_LOW (form);
8752
8753 gcc_assert (mode0 == mode1);
8754
8755 if (arg0 == error_mark_node || arg1 == error_mark_node)
8756 return const0_rtx;
8757
8758 if (target == 0
8759 || GET_MODE (target) != SImode
8760 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
8761 target = gen_reg_rtx (SImode);
8762 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
8763 op0 = copy_to_mode_reg (mode0, op0);
8764 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
8765 op1 = copy_to_mode_reg (mode1, op1);
8766
8767 scratch = gen_reg_rtx (CCFPmode);
8768
8769 pat = GEN_FCN (icode) (scratch, op0, op1);
8770 if (!pat)
8771 return const0_rtx;
8772
8773 emit_insn (pat);
8774
8775 switch (form_int)
8776 {
8777 /* LT bit. */
8778 case 0:
8779 code = LT;
8780 break;
8781 /* GT bit. */
8782 case 1:
8783 code = GT;
8784 break;
8785 /* EQ bit. */
8786 case 2:
8787 code = EQ;
8788 break;
8789 /* UN bit. */
8790 case 3:
8791 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8792 return target;
8793 default:
8794 error ("argument 1 of __builtin_paired_predicate is out of range");
8795 return const0_rtx;
8796 }
8797
8798 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8799 emit_move_insn (target, tmp);
8800 return target;
8801}
8802
a3170dc6 8803static rtx
5039610b 8804spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8805{
8806 rtx pat, scratch, tmp;
5039610b
SL
8807 tree form = CALL_EXPR_ARG (exp, 0);
8808 tree arg0 = CALL_EXPR_ARG (exp, 1);
8809 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8810 rtx op0 = expand_normal (arg0);
8811 rtx op1 = expand_normal (arg1);
a3170dc6
AH
8812 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8813 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8814 int form_int;
8815 enum rtx_code code;
8816
8817 if (TREE_CODE (form) != INTEGER_CST)
8818 {
8819 error ("argument 1 of __builtin_spe_predicate must be a constant");
8820 return const0_rtx;
8821 }
8822 else
8823 form_int = TREE_INT_CST_LOW (form);
8824
37409796 8825 gcc_assert (mode0 == mode1);
a3170dc6
AH
8826
8827 if (arg0 == error_mark_node || arg1 == error_mark_node)
8828 return const0_rtx;
8829
8830 if (target == 0
8831 || GET_MODE (target) != SImode
8832 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
8833 target = gen_reg_rtx (SImode);
8834
8835 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8836 op0 = copy_to_mode_reg (mode0, op0);
8837 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8838 op1 = copy_to_mode_reg (mode1, op1);
8839
8840 scratch = gen_reg_rtx (CCmode);
8841
8842 pat = GEN_FCN (icode) (scratch, op0, op1);
8843 if (! pat)
8844 return const0_rtx;
8845 emit_insn (pat);
8846
8847 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
8848 _lower_. We use one compare, but look in different bits of the
8849 CR for each variant.
8850
8851 There are 2 elements in each SPE simd type (upper/lower). The CR
8852 bits are set as follows:
8853
8854 BIT0 | BIT 1 | BIT 2 | BIT 3
8855 U | L | (U | L) | (U & L)
8856
8857 So, for an "all" relationship, BIT 3 would be set.
8858 For an "any" relationship, BIT 2 would be set. Etc.
8859
8860 Following traditional nomenclature, these bits map to:
8861
8862 BIT0 | BIT 1 | BIT 2 | BIT 3
8863 LT | GT | EQ | OV
8864
 8865 Later, we will generate rtl to look in the OV, EQ, LT or GT bit,
 as selected by the all/any/upper/lower variant.
8866 */
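/* Illustrative sketch, not part of the original source: assuming a
   predicate builtin named __builtin_spe_evcmpgts (the real names come
   from bdesc_spe_predicates), the form argument selects the CR bit:

       __ev64_opaque__ a, b;
       int all_gt = __builtin_spe_evcmpgts (0, a, b);    all   -> OV bit
       int any_gt = __builtin_spe_evcmpgts (1, a, b);    any   -> EQ bit
       int hi_gt  = __builtin_spe_evcmpgts (2, a, b);    upper -> LT bit
       int lo_gt  = __builtin_spe_evcmpgts (3, a, b);    lower -> GT bit

   Each call emits a single vector compare followed by a read of the
   selected CR bit.  */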
8867
8868 switch (form_int)
8869 {
8870 /* All variant. OV bit. */
8871 case 0:
8872 /* We need to get to the OV bit, which is the ORDERED bit. We
8873 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 8874 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
8875 So let's just use another pattern. */
8876 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8877 return target;
8878 /* Any variant. EQ bit. */
8879 case 1:
8880 code = EQ;
8881 break;
8882 /* Upper variant. LT bit. */
8883 case 2:
8884 code = LT;
8885 break;
8886 /* Lower variant. GT bit. */
8887 case 3:
8888 code = GT;
8889 break;
8890 default:
8891 error ("argument 1 of __builtin_spe_predicate is out of range");
8892 return const0_rtx;
8893 }
8894
8895 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8896 emit_move_insn (target, tmp);
8897
8898 return target;
8899}
8900
8901/* The evsel builtins look like this:
8902
8903 e = __builtin_spe_evsel_OP (a, b, c, d);
8904
8905 and work like this:
8906
8907 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
8908 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
8909*/
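/* Illustrative sketch, not part of the original source: with OP standing
   for a signed greater-than comparison, the rule above works out to
   (element values chosen only for illustration)

       a = { 5, 1 }   b = { 3, 9 }   c = { 10, 20 }   d = { 30, 40 }
       e = __builtin_spe_evsel_OP (a, b, c, d);
           upper: 5 > 3 is true,  so e[upper] = c[upper] = 10
           lower: 1 > 9 is false, so e[lower] = d[lower] = 40

   The concrete names substituted for OP come from bdesc_spe_evsel.  */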
8910
8911static rtx
5039610b 8912spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8913{
8914 rtx pat, scratch;
5039610b
SL
8915 tree arg0 = CALL_EXPR_ARG (exp, 0);
8916 tree arg1 = CALL_EXPR_ARG (exp, 1);
8917 tree arg2 = CALL_EXPR_ARG (exp, 2);
8918 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
8919 rtx op0 = expand_normal (arg0);
8920 rtx op1 = expand_normal (arg1);
8921 rtx op2 = expand_normal (arg2);
8922 rtx op3 = expand_normal (arg3);
a3170dc6
AH
8923 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8924 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8925
37409796 8926 gcc_assert (mode0 == mode1);
a3170dc6
AH
8927
8928 if (arg0 == error_mark_node || arg1 == error_mark_node
8929 || arg2 == error_mark_node || arg3 == error_mark_node)
8930 return const0_rtx;
8931
8932 if (target == 0
8933 || GET_MODE (target) != mode0
8934 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
8935 target = gen_reg_rtx (mode0);
8936
8937 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8938 op0 = copy_to_mode_reg (mode0, op0);
8939 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8940 op1 = copy_to_mode_reg (mode0, op1);
8941 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8942 op2 = copy_to_mode_reg (mode0, op2);
8943 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
8944 op3 = copy_to_mode_reg (mode0, op3);
8945
8946 /* Generate the compare. */
8947 scratch = gen_reg_rtx (CCmode);
8948 pat = GEN_FCN (icode) (scratch, op0, op1);
8949 if (! pat)
8950 return const0_rtx;
8951 emit_insn (pat);
8952
8953 if (mode0 == V2SImode)
8954 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
8955 else
8956 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
8957
8958 return target;
8959}
8960
0ac081f6
AH
8961/* Expand an expression EXP that calls a built-in function,
8962 with result going to TARGET if that's convenient
8963 (and in mode MODE if that's convenient).
8964 SUBTARGET may be used as the target for computing one of EXP's operands.
8965 IGNORE is nonzero if the value is to be ignored. */
8966
8967static rtx
a2369ed3 8968rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
8969 enum machine_mode mode ATTRIBUTE_UNUSED,
8970 int ignore ATTRIBUTE_UNUSED)
0ac081f6 8971{
5039610b 8972 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 8973 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 8974 const struct builtin_description *d;
92898235
AH
8975 size_t i;
8976 rtx ret;
8977 bool success;
f676971a 8978
9c78b944
DE
8979 if (fcode == RS6000_BUILTIN_RECIP)
8980 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
8981
8982 if (fcode == RS6000_BUILTIN_RECIPF)
8983 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
8984
8985 if (fcode == RS6000_BUILTIN_RSQRTF)
8986 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
8987
7ccf35ed
DN
8988 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
8989 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8990 {
8991 int icode = (int) CODE_FOR_altivec_lvsr;
8992 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8993 enum machine_mode mode = insn_data[icode].operand[1].mode;
8994 tree arg;
8995 rtx op, addr, pat;
8996
37409796 8997 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 8998
5039610b 8999 arg = CALL_EXPR_ARG (exp, 0);
37409796 9000 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
9001 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
9002 addr = memory_address (mode, op);
9003 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9004 op = addr;
9005 else
9006 {
 9007 /* For the load case we need to negate the address. */
9008 op = gen_reg_rtx (GET_MODE (addr));
9009 emit_insn (gen_rtx_SET (VOIDmode, op,
9010 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 9011 }
7ccf35ed
DN
9012 op = gen_rtx_MEM (mode, op);
9013
9014 if (target == 0
9015 || GET_MODE (target) != tmode
9016 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9017 target = gen_reg_rtx (tmode);
9018
9019 /*pat = gen_altivec_lvsr (target, op);*/
9020 pat = GEN_FCN (icode) (target, op);
9021 if (!pat)
9022 return 0;
9023 emit_insn (pat);
9024
9025 return target;
9026 }
5039610b
SL
9027
9028 /* FIXME: There's got to be a nicer way to handle this case than
9029 constructing a new CALL_EXPR. */
f57d17f1
TM
9030 if (fcode == ALTIVEC_BUILTIN_VCFUX
9031 || fcode == ALTIVEC_BUILTIN_VCFSX)
9032 {
5039610b
SL
9033 if (call_expr_nargs (exp) == 1)
9034 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
9035 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 9036 }
7ccf35ed 9037
0ac081f6 9038 if (TARGET_ALTIVEC)
92898235
AH
9039 {
9040 ret = altivec_expand_builtin (exp, target, &success);
9041
a3170dc6
AH
9042 if (success)
9043 return ret;
9044 }
9045 if (TARGET_SPE)
9046 {
9047 ret = spe_expand_builtin (exp, target, &success);
9048
92898235
AH
9049 if (success)
9050 return ret;
9051 }
96038623
DE
9052 if (TARGET_PAIRED_FLOAT)
9053 {
9054 ret = paired_expand_builtin (exp, target, &success);
9055
9056 if (success)
9057 return ret;
9058 }
92898235 9059
96038623 9060 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 9061
37409796
NS
9062 /* Handle simple unary operations. */
9063 d = (struct builtin_description *) bdesc_1arg;
9064 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9065 if (d->code == fcode)
5039610b 9066 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 9067
37409796
NS
9068 /* Handle simple binary operations. */
9069 d = (struct builtin_description *) bdesc_2arg;
9070 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9071 if (d->code == fcode)
5039610b 9072 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 9073
37409796 9074 /* Handle simple ternary operations. */
586de218 9075 d = bdesc_3arg;
37409796
NS
9076 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
9077 if (d->code == fcode)
5039610b 9078 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 9079
37409796 9080 gcc_unreachable ();
0ac081f6
AH
9081}
9082
7c62e993
PB
9083static tree
9084build_opaque_vector_type (tree node, int nunits)
9085{
9086 node = copy_node (node);
9087 TYPE_MAIN_VARIANT (node) = node;
9088 return build_vector_type (node, nunits);
9089}
9090
0ac081f6 9091static void
863d938c 9092rs6000_init_builtins (void)
0ac081f6 9093{
4a5eab38
PB
9094 V2SI_type_node = build_vector_type (intSI_type_node, 2);
9095 V2SF_type_node = build_vector_type (float_type_node, 2);
9096 V4HI_type_node = build_vector_type (intHI_type_node, 4);
9097 V4SI_type_node = build_vector_type (intSI_type_node, 4);
9098 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 9099 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
9100 V16QI_type_node = build_vector_type (intQI_type_node, 16);
9101
9102 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
9103 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
9104 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
9105
7c62e993
PB
9106 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
9107 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 9108 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 9109 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 9110
8bb418a3
ZL
9111 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
9112 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
9113 'vector unsigned short'. */
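/* Illustrative sketch, not part of the original source: because the bool
   and pixel types are distinct type nodes, the C++ front end can keep

       void f (__vector __bool char);
       void f (__vector unsigned char);

   as two separate overloads (with distinct mangled names) even though
   both types have the same size and machine mode.  */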
9114
8dd16ecc
NS
9115 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
9116 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
9117 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
9118 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 9119
58646b77
PB
9120 long_integer_type_internal_node = long_integer_type_node;
9121 long_unsigned_type_internal_node = long_unsigned_type_node;
9122 intQI_type_internal_node = intQI_type_node;
9123 uintQI_type_internal_node = unsigned_intQI_type_node;
9124 intHI_type_internal_node = intHI_type_node;
9125 uintHI_type_internal_node = unsigned_intHI_type_node;
9126 intSI_type_internal_node = intSI_type_node;
9127 uintSI_type_internal_node = unsigned_intSI_type_node;
9128 float_type_internal_node = float_type_node;
9129 void_type_internal_node = void_type_node;
9130
8bb418a3
ZL
9131 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9132 get_identifier ("__bool char"),
9133 bool_char_type_node));
9134 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9135 get_identifier ("__bool short"),
9136 bool_short_type_node));
9137 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9138 get_identifier ("__bool int"),
9139 bool_int_type_node));
9140 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9141 get_identifier ("__pixel"),
9142 pixel_type_node));
9143
4a5eab38
PB
9144 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
9145 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
9146 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
9147 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
9148
9149 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9150 get_identifier ("__vector unsigned char"),
9151 unsigned_V16QI_type_node));
9152 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9153 get_identifier ("__vector signed char"),
9154 V16QI_type_node));
9155 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9156 get_identifier ("__vector __bool char"),
9157 bool_V16QI_type_node));
9158
9159 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9160 get_identifier ("__vector unsigned short"),
9161 unsigned_V8HI_type_node));
9162 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9163 get_identifier ("__vector signed short"),
9164 V8HI_type_node));
9165 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9166 get_identifier ("__vector __bool short"),
9167 bool_V8HI_type_node));
9168
9169 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9170 get_identifier ("__vector unsigned int"),
9171 unsigned_V4SI_type_node));
9172 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9173 get_identifier ("__vector signed int"),
9174 V4SI_type_node));
9175 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9176 get_identifier ("__vector __bool int"),
9177 bool_V4SI_type_node));
9178
9179 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9180 get_identifier ("__vector float"),
9181 V4SF_type_node));
9182 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9183 get_identifier ("__vector __pixel"),
9184 pixel_V8HI_type_node));
9185
96038623
DE
9186 if (TARGET_PAIRED_FLOAT)
9187 paired_init_builtins ();
a3170dc6 9188 if (TARGET_SPE)
3fdaa45a 9189 spe_init_builtins ();
0ac081f6
AH
9190 if (TARGET_ALTIVEC)
9191 altivec_init_builtins ();
96038623 9192 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 9193 rs6000_common_init_builtins ();
9c78b944
DE
9194 if (TARGET_PPC_GFXOPT)
9195 {
9196 tree ftype = build_function_type_list (float_type_node,
9197 float_type_node,
9198 float_type_node,
9199 NULL_TREE);
9200 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9201 RS6000_BUILTIN_RECIPF);
9202
9203 ftype = build_function_type_list (float_type_node,
9204 float_type_node,
9205 NULL_TREE);
9206 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9207 RS6000_BUILTIN_RSQRTF);
9208 }
9209 if (TARGET_POPCNTB)
9210 {
9211 tree ftype = build_function_type_list (double_type_node,
9212 double_type_node,
9213 double_type_node,
9214 NULL_TREE);
9215 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9216 RS6000_BUILTIN_RECIP);
9217
9218 }
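/* Illustrative sketch, not part of the original source; the semantics
   below are assumed from the recip/rsqrt expanders these builtins map to
   in rs6000_expand_builtin:

       float  qf = __builtin_recipdivf (xf, yf);    approximately xf / yf
       float  rf = __builtin_rsqrtf (xf);           approximately 1.0f / sqrtf (xf)
       double qd = __builtin_recipdiv (xd, yd);     approximately xd / yd

   i.e. refinement of the hardware estimate instructions rather than a
   full-precision divide or square root.  */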
69ca3549
DE
9219
9220#if TARGET_XCOFF
9221 /* AIX libm provides clog as __clog. */
9222 if (built_in_decls [BUILT_IN_CLOG])
9223 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9224#endif
fb220235
FXC
9225
9226#ifdef SUBTARGET_INIT_BUILTINS
9227 SUBTARGET_INIT_BUILTINS;
9228#endif
0ac081f6
AH
9229}
9230
a3170dc6
AH
9231/* Search through a set of builtins and enable the mask bits.
9232 DESC is an array of builtins.
b6d08ca1 9233 SIZE is the total number of builtins.
a3170dc6
AH
9234 START is the builtin enum at which to start.
9235 END is the builtin enum at which to end. */
0ac081f6 9236static void
a2369ed3 9237enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 9238 enum rs6000_builtins start,
a2369ed3 9239 enum rs6000_builtins end)
a3170dc6
AH
9240{
9241 int i;
9242
9243 for (i = 0; i < size; ++i)
9244 if (desc[i].code == start)
9245 break;
9246
9247 if (i == size)
9248 return;
9249
9250 for (; i < size; ++i)
9251 {
9252 /* Flip all the bits on. */
9253 desc[i].mask = target_flags;
9254 if (desc[i].code == end)
9255 break;
9256 }
9257}
9258
9259static void
863d938c 9260spe_init_builtins (void)
0ac081f6 9261{
a3170dc6
AH
9262 tree endlink = void_list_node;
9263 tree puint_type_node = build_pointer_type (unsigned_type_node);
9264 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9265 struct builtin_description *d;
0ac081f6
AH
9266 size_t i;
9267
a3170dc6
AH
9268 tree v2si_ftype_4_v2si
9269 = build_function_type
3fdaa45a
AH
9270 (opaque_V2SI_type_node,
9271 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9272 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9273 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9274 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9275 endlink)))));
9276
9277 tree v2sf_ftype_4_v2sf
9278 = build_function_type
3fdaa45a
AH
9279 (opaque_V2SF_type_node,
9280 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9281 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9282 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9283 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9284 endlink)))));
9285
9286 tree int_ftype_int_v2si_v2si
9287 = build_function_type
9288 (integer_type_node,
9289 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9290 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9291 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9292 endlink))));
9293
9294 tree int_ftype_int_v2sf_v2sf
9295 = build_function_type
9296 (integer_type_node,
9297 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9298 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9299 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9300 endlink))));
9301
9302 tree void_ftype_v2si_puint_int
9303 = build_function_type (void_type_node,
3fdaa45a 9304 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9305 tree_cons (NULL_TREE, puint_type_node,
9306 tree_cons (NULL_TREE,
9307 integer_type_node,
9308 endlink))));
9309
9310 tree void_ftype_v2si_puint_char
9311 = build_function_type (void_type_node,
3fdaa45a 9312 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9313 tree_cons (NULL_TREE, puint_type_node,
9314 tree_cons (NULL_TREE,
9315 char_type_node,
9316 endlink))));
9317
9318 tree void_ftype_v2si_pv2si_int
9319 = build_function_type (void_type_node,
3fdaa45a 9320 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9321 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9322 tree_cons (NULL_TREE,
9323 integer_type_node,
9324 endlink))));
9325
9326 tree void_ftype_v2si_pv2si_char
9327 = build_function_type (void_type_node,
3fdaa45a 9328 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9329 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9330 tree_cons (NULL_TREE,
9331 char_type_node,
9332 endlink))));
9333
9334 tree void_ftype_int
9335 = build_function_type (void_type_node,
9336 tree_cons (NULL_TREE, integer_type_node, endlink));
9337
9338 tree int_ftype_void
36e8d515 9339 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
9340
9341 tree v2si_ftype_pv2si_int
3fdaa45a 9342 = build_function_type (opaque_V2SI_type_node,
6035d635 9343 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9344 tree_cons (NULL_TREE, integer_type_node,
9345 endlink)));
9346
9347 tree v2si_ftype_puint_int
3fdaa45a 9348 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9349 tree_cons (NULL_TREE, puint_type_node,
9350 tree_cons (NULL_TREE, integer_type_node,
9351 endlink)));
9352
9353 tree v2si_ftype_pushort_int
3fdaa45a 9354 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9355 tree_cons (NULL_TREE, pushort_type_node,
9356 tree_cons (NULL_TREE, integer_type_node,
9357 endlink)));
9358
00332c9f
AH
9359 tree v2si_ftype_signed_char
9360 = build_function_type (opaque_V2SI_type_node,
9361 tree_cons (NULL_TREE, signed_char_type_node,
9362 endlink));
9363
a3170dc6
AH
9364 /* The initialization of the simple binary and unary builtins is
9365 done in rs6000_common_init_builtins, but we have to enable the
9366 mask bits here manually because we have run out of `target_flags'
9367 bits. We really need to redesign this mask business. */
9368
9369 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9370 ARRAY_SIZE (bdesc_2arg),
9371 SPE_BUILTIN_EVADDW,
9372 SPE_BUILTIN_EVXOR);
9373 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9374 ARRAY_SIZE (bdesc_1arg),
9375 SPE_BUILTIN_EVABS,
9376 SPE_BUILTIN_EVSUBFUSIAAW);
9377 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9378 ARRAY_SIZE (bdesc_spe_predicates),
9379 SPE_BUILTIN_EVCMPEQ,
9380 SPE_BUILTIN_EVFSTSTLT);
9381 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9382 ARRAY_SIZE (bdesc_spe_evsel),
9383 SPE_BUILTIN_EVSEL_CMPGTS,
9384 SPE_BUILTIN_EVSEL_FSTSTEQ);
9385
36252949
AH
9386 (*lang_hooks.decls.pushdecl)
9387 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9388 opaque_V2SI_type_node));
9389
a3170dc6 9390 /* Initialize irregular SPE builtins. */
f676971a 9391
a3170dc6
AH
9392 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9393 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9394 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9395 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9396 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9397 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9398 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9399 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9400 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9401 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9402 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9403 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9404 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9405 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9406 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9407 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
9408 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9409 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
9410
9411 /* Loads. */
9412 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9413 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9414 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9415 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9416 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9417 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9418 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9419 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9420 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9421 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9422 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9423 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9424 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9425 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9426 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9427 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9428 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9429 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9430 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9431 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9432 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9433 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9434
9435 /* Predicates. */
9436 d = (struct builtin_description *) bdesc_spe_predicates;
9437 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9438 {
9439 tree type;
9440
9441 switch (insn_data[d->icode].operand[1].mode)
9442 {
9443 case V2SImode:
9444 type = int_ftype_int_v2si_v2si;
9445 break;
9446 case V2SFmode:
9447 type = int_ftype_int_v2sf_v2sf;
9448 break;
9449 default:
37409796 9450 gcc_unreachable ();
a3170dc6
AH
9451 }
9452
9453 def_builtin (d->mask, d->name, type, d->code);
9454 }
9455
9456 /* Evsel predicates. */
9457 d = (struct builtin_description *) bdesc_spe_evsel;
9458 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9459 {
9460 tree type;
9461
9462 switch (insn_data[d->icode].operand[1].mode)
9463 {
9464 case V2SImode:
9465 type = v2si_ftype_4_v2si;
9466 break;
9467 case V2SFmode:
9468 type = v2sf_ftype_4_v2sf;
9469 break;
9470 default:
37409796 9471 gcc_unreachable ();
a3170dc6
AH
9472 }
9473
9474 def_builtin (d->mask, d->name, type, d->code);
9475 }
9476}
9477
96038623
DE
9478static void
9479paired_init_builtins (void)
9480{
23a651fc 9481 const struct builtin_description *d;
96038623
DE
9482 size_t i;
9483 tree endlink = void_list_node;
9484
9485 tree int_ftype_int_v2sf_v2sf
9486 = build_function_type
9487 (integer_type_node,
9488 tree_cons (NULL_TREE, integer_type_node,
9489 tree_cons (NULL_TREE, V2SF_type_node,
9490 tree_cons (NULL_TREE, V2SF_type_node,
9491 endlink))));
9492 tree pcfloat_type_node =
9493 build_pointer_type (build_qualified_type
9494 (float_type_node, TYPE_QUAL_CONST));
9495
9496 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9497 long_integer_type_node,
9498 pcfloat_type_node,
9499 NULL_TREE);
9500 tree void_ftype_v2sf_long_pcfloat =
9501 build_function_type_list (void_type_node,
9502 V2SF_type_node,
9503 long_integer_type_node,
9504 pcfloat_type_node,
9505 NULL_TREE);
9506
9507
9508 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9509 PAIRED_BUILTIN_LX);
9510
9511
9512 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9513 PAIRED_BUILTIN_STX);
9514
9515 /* Predicates. */
23a651fc 9516 d = bdesc_paired_preds;
96038623
DE
9517 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9518 {
9519 tree type;
9520
9521 switch (insn_data[d->icode].operand[1].mode)
9522 {
9523 case V2SFmode:
9524 type = int_ftype_int_v2sf_v2sf;
9525 break;
9526 default:
9527 gcc_unreachable ();
9528 }
9529
9530 def_builtin (d->mask, d->name, type, d->code);
9531 }
9532}
9533
a3170dc6 9534static void
863d938c 9535altivec_init_builtins (void)
a3170dc6 9536{
586de218
KG
9537 const struct builtin_description *d;
9538 const struct builtin_description_predicates *dp;
a3170dc6 9539 size_t i;
7a4eca66
DE
9540 tree ftype;
9541
a3170dc6
AH
9542 tree pfloat_type_node = build_pointer_type (float_type_node);
9543 tree pint_type_node = build_pointer_type (integer_type_node);
9544 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9545 tree pchar_type_node = build_pointer_type (char_type_node);
9546
9547 tree pvoid_type_node = build_pointer_type (void_type_node);
9548
0dbc3651
ZW
9549 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9550 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9551 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9552 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9553
9554 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9555
58646b77
PB
9556 tree int_ftype_opaque
9557 = build_function_type_list (integer_type_node,
9558 opaque_V4SI_type_node, NULL_TREE);
9559
9560 tree opaque_ftype_opaque_int
9561 = build_function_type_list (opaque_V4SI_type_node,
9562 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9563 tree opaque_ftype_opaque_opaque_int
9564 = build_function_type_list (opaque_V4SI_type_node,
9565 opaque_V4SI_type_node, opaque_V4SI_type_node,
9566 integer_type_node, NULL_TREE);
9567 tree int_ftype_int_opaque_opaque
9568 = build_function_type_list (integer_type_node,
9569 integer_type_node, opaque_V4SI_type_node,
9570 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
9571 tree int_ftype_int_v4si_v4si
9572 = build_function_type_list (integer_type_node,
9573 integer_type_node, V4SI_type_node,
9574 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9575 tree v4sf_ftype_pcfloat
9576 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9577 tree void_ftype_pfloat_v4sf
b4de2f7d 9578 = build_function_type_list (void_type_node,
a3170dc6 9579 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
9580 tree v4si_ftype_pcint
9581 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9582 tree void_ftype_pint_v4si
b4de2f7d
AH
9583 = build_function_type_list (void_type_node,
9584 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9585 tree v8hi_ftype_pcshort
9586 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9587 tree void_ftype_pshort_v8hi
b4de2f7d
AH
9588 = build_function_type_list (void_type_node,
9589 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
9590 tree v16qi_ftype_pcchar
9591 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9592 tree void_ftype_pchar_v16qi
b4de2f7d
AH
9593 = build_function_type_list (void_type_node,
9594 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9595 tree void_ftype_v4si
b4de2f7d 9596 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9597 tree v8hi_ftype_void
9598 = build_function_type (V8HI_type_node, void_list_node);
9599 tree void_ftype_void
9600 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
9601 tree void_ftype_int
9602 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9603
58646b77
PB
9604 tree opaque_ftype_long_pcvoid
9605 = build_function_type_list (opaque_V4SI_type_node,
9606 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9607 tree v16qi_ftype_long_pcvoid
a3170dc6 9608 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
9609 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9610 tree v8hi_ftype_long_pcvoid
a3170dc6 9611 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
9612 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9613 tree v4si_ftype_long_pcvoid
a3170dc6 9614 = build_function_type_list (V4SI_type_node,
b4a62fa0 9615 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9616
58646b77
PB
9617 tree void_ftype_opaque_long_pvoid
9618 = build_function_type_list (void_type_node,
9619 opaque_V4SI_type_node, long_integer_type_node,
9620 pvoid_type_node, NULL_TREE);
b4a62fa0 9621 tree void_ftype_v4si_long_pvoid
b4de2f7d 9622 = build_function_type_list (void_type_node,
b4a62fa0 9623 V4SI_type_node, long_integer_type_node,
b4de2f7d 9624 pvoid_type_node, NULL_TREE);
b4a62fa0 9625 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9626 = build_function_type_list (void_type_node,
b4a62fa0 9627 V16QI_type_node, long_integer_type_node,
b4de2f7d 9628 pvoid_type_node, NULL_TREE);
b4a62fa0 9629 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9630 = build_function_type_list (void_type_node,
b4a62fa0 9631 V8HI_type_node, long_integer_type_node,
b4de2f7d 9632 pvoid_type_node, NULL_TREE);
a3170dc6
AH
9633 tree int_ftype_int_v8hi_v8hi
9634 = build_function_type_list (integer_type_node,
9635 integer_type_node, V8HI_type_node,
9636 V8HI_type_node, NULL_TREE);
9637 tree int_ftype_int_v16qi_v16qi
9638 = build_function_type_list (integer_type_node,
9639 integer_type_node, V16QI_type_node,
9640 V16QI_type_node, NULL_TREE);
9641 tree int_ftype_int_v4sf_v4sf
9642 = build_function_type_list (integer_type_node,
9643 integer_type_node, V4SF_type_node,
9644 V4SF_type_node, NULL_TREE);
9645 tree v4si_ftype_v4si
9646 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9647 tree v8hi_ftype_v8hi
9648 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9649 tree v16qi_ftype_v16qi
9650 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9651 tree v4sf_ftype_v4sf
9652 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9653 tree void_ftype_pcvoid_int_int
a3170dc6 9654 = build_function_type_list (void_type_node,
0dbc3651 9655 pcvoid_type_node, integer_type_node,
8bb418a3 9656 integer_type_node, NULL_TREE);
8bb418a3 9657
0dbc3651
ZW
9658 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9659 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9660 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9661 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9662 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9663 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9664 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9665 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9666 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9667 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9668 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9669 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9670 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9671 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9672 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9673 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
9674 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9675 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9676 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9677 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
9678 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9679 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9680 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9681 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9682 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9683 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9684 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9685 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9686 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9687 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9688 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9689 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
9690 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9691 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9692 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9693 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9694 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9695 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9696 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9697 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9698 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9699 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9700 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9701 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9702 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9703 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9704
9705 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
9706
9707 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
9708 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
9709 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
9710 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
9711 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
9712 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
9713 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
9714 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
9715 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
9716 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 9717
a3170dc6 9718 /* Add the DST variants. */
586de218 9719 d = bdesc_dst;
a3170dc6 9720 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 9721 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
9722
9723 /* Initialize the predicates. */
586de218 9724 dp = bdesc_altivec_preds;
a3170dc6
AH
9725 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
9726 {
9727 enum machine_mode mode1;
9728 tree type;
58646b77
PB
9729 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9730 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 9731
58646b77
PB
9732 if (is_overloaded)
9733 mode1 = VOIDmode;
9734 else
9735 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
9736
9737 switch (mode1)
9738 {
58646b77
PB
9739 case VOIDmode:
9740 type = int_ftype_int_opaque_opaque;
9741 break;
a3170dc6
AH
9742 case V4SImode:
9743 type = int_ftype_int_v4si_v4si;
9744 break;
9745 case V8HImode:
9746 type = int_ftype_int_v8hi_v8hi;
9747 break;
9748 case V16QImode:
9749 type = int_ftype_int_v16qi_v16qi;
9750 break;
9751 case V4SFmode:
9752 type = int_ftype_int_v4sf_v4sf;
9753 break;
9754 default:
37409796 9755 gcc_unreachable ();
a3170dc6 9756 }
f676971a 9757
a3170dc6
AH
9758 def_builtin (dp->mask, dp->name, type, dp->code);
9759 }
9760
9761 /* Initialize the abs* operators. */
586de218 9762 d = bdesc_abs;
a3170dc6
AH
9763 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
9764 {
9765 enum machine_mode mode0;
9766 tree type;
9767
9768 mode0 = insn_data[d->icode].operand[0].mode;
9769
9770 switch (mode0)
9771 {
9772 case V4SImode:
9773 type = v4si_ftype_v4si;
9774 break;
9775 case V8HImode:
9776 type = v8hi_ftype_v8hi;
9777 break;
9778 case V16QImode:
9779 type = v16qi_ftype_v16qi;
9780 break;
9781 case V4SFmode:
9782 type = v4sf_ftype_v4sf;
9783 break;
9784 default:
37409796 9785 gcc_unreachable ();
a3170dc6 9786 }
f676971a 9787
a3170dc6
AH
9788 def_builtin (d->mask, d->name, type, d->code);
9789 }
7ccf35ed 9790
13c62176
DN
9791 if (TARGET_ALTIVEC)
9792 {
9793 tree decl;
9794
9795 /* Initialize target builtin that implements
9796 targetm.vectorize.builtin_mask_for_load. */
9797
c79efc4d
RÁE
9798 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
9799 v16qi_ftype_long_pcvoid,
9800 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
9801 BUILT_IN_MD, NULL, NULL_TREE);
9802 TREE_READONLY (decl) = 1;
13c62176
DN
9803 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
9804 altivec_builtin_mask_for_load = decl;
13c62176 9805 }
7a4eca66
DE
9806
9807 /* Access to the vec_init patterns. */
9808 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
9809 integer_type_node, integer_type_node,
9810 integer_type_node, NULL_TREE);
9811 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
9812 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
9813
9814 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
9815 short_integer_type_node,
9816 short_integer_type_node,
9817 short_integer_type_node,
9818 short_integer_type_node,
9819 short_integer_type_node,
9820 short_integer_type_node,
9821 short_integer_type_node, NULL_TREE);
9822 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
9823 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
9824
9825 ftype = build_function_type_list (V16QI_type_node, char_type_node,
9826 char_type_node, char_type_node,
9827 char_type_node, char_type_node,
9828 char_type_node, char_type_node,
9829 char_type_node, char_type_node,
9830 char_type_node, char_type_node,
9831 char_type_node, char_type_node,
9832 char_type_node, char_type_node,
9833 char_type_node, NULL_TREE);
9834 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
9835 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
9836
9837 ftype = build_function_type_list (V4SF_type_node, float_type_node,
9838 float_type_node, float_type_node,
9839 float_type_node, NULL_TREE);
9840 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
9841 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
9842
9843 /* Access to the vec_set patterns. */
9844 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
9845 intSI_type_node,
9846 integer_type_node, NULL_TREE);
9847 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
9848 ALTIVEC_BUILTIN_VEC_SET_V4SI);
9849
9850 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
9851 intHI_type_node,
9852 integer_type_node, NULL_TREE);
9853 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
9854 ALTIVEC_BUILTIN_VEC_SET_V8HI);
9855
 9856 ftype = build_function_type_list (V16QI_type_node, V16QI_type_node,
9857 intQI_type_node,
9858 integer_type_node, NULL_TREE);
9859 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
9860 ALTIVEC_BUILTIN_VEC_SET_V16QI);
9861
9862 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
9863 float_type_node,
9864 integer_type_node, NULL_TREE);
9865 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
9866 ALTIVEC_BUILTIN_VEC_SET_V4SF);
9867
9868 /* Access to the vec_extract patterns. */
9869 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
9870 integer_type_node, NULL_TREE);
9871 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
9872 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
9873
9874 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
9875 integer_type_node, NULL_TREE);
9876 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
9877 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
9878
9879 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
9880 integer_type_node, NULL_TREE);
9881 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
9882 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
9883
9884 ftype = build_function_type_list (float_type_node, V4SF_type_node,
9885 integer_type_node, NULL_TREE);
9886 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
9887 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
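/* Illustrative sketch, not part of the original source: the vec_init,
   vec_set and vec_ext builtins registered above correspond roughly to

       __vector signed int v = __builtin_vec_init_v4si (1, 2, 3, 4);
       v = __builtin_vec_set_v4si (v, 42, 2);        store 42 into element 2
       int x = __builtin_vec_ext_v4si (v, 2);        read element 2 back

   and give the front ends direct access to the vector initializer,
   element-set and element-extract expansion patterns.  */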
a3170dc6
AH
9888}
9889
9890static void
863d938c 9891rs6000_common_init_builtins (void)
a3170dc6 9892{
586de218 9893 const struct builtin_description *d;
a3170dc6
AH
9894 size_t i;
9895
96038623
DE
9896 tree v2sf_ftype_v2sf_v2sf_v2sf
9897 = build_function_type_list (V2SF_type_node,
9898 V2SF_type_node, V2SF_type_node,
9899 V2SF_type_node, NULL_TREE);
9900
a3170dc6
AH
9901 tree v4sf_ftype_v4sf_v4sf_v16qi
9902 = build_function_type_list (V4SF_type_node,
9903 V4SF_type_node, V4SF_type_node,
9904 V16QI_type_node, NULL_TREE);
9905 tree v4si_ftype_v4si_v4si_v16qi
9906 = build_function_type_list (V4SI_type_node,
9907 V4SI_type_node, V4SI_type_node,
9908 V16QI_type_node, NULL_TREE);
9909 tree v8hi_ftype_v8hi_v8hi_v16qi
9910 = build_function_type_list (V8HI_type_node,
9911 V8HI_type_node, V8HI_type_node,
9912 V16QI_type_node, NULL_TREE);
9913 tree v16qi_ftype_v16qi_v16qi_v16qi
9914 = build_function_type_list (V16QI_type_node,
9915 V16QI_type_node, V16QI_type_node,
9916 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
9917 tree v4si_ftype_int
9918 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
9919 tree v8hi_ftype_int
9920 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
9921 tree v16qi_ftype_int
9922 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
9923 tree v8hi_ftype_v16qi
9924 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
9925 tree v4sf_ftype_v4sf
9926 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
9927
9928 tree v2si_ftype_v2si_v2si
2abe3e28
AH
9929 = build_function_type_list (opaque_V2SI_type_node,
9930 opaque_V2SI_type_node,
9931 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9932
96038623 9933 tree v2sf_ftype_v2sf_v2sf_spe
2abe3e28
AH
9934 = build_function_type_list (opaque_V2SF_type_node,
9935 opaque_V2SF_type_node,
9936 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 9937
96038623
DE
9938 tree v2sf_ftype_v2sf_v2sf
9939 = build_function_type_list (V2SF_type_node,
9940 V2SF_type_node,
9941 V2SF_type_node, NULL_TREE);
9942
9943
a3170dc6 9944 tree v2si_ftype_int_int
2abe3e28 9945 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9946 integer_type_node, integer_type_node,
9947 NULL_TREE);
9948
58646b77
PB
9949 tree opaque_ftype_opaque
9950 = build_function_type_list (opaque_V4SI_type_node,
9951 opaque_V4SI_type_node, NULL_TREE);
9952
a3170dc6 9953 tree v2si_ftype_v2si
2abe3e28
AH
9954 = build_function_type_list (opaque_V2SI_type_node,
9955 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9956
96038623 9957 tree v2sf_ftype_v2sf_spe
2abe3e28
AH
9958 = build_function_type_list (opaque_V2SF_type_node,
9959 opaque_V2SF_type_node, NULL_TREE);
f676971a 9960
96038623
DE
9961 tree v2sf_ftype_v2sf
9962 = build_function_type_list (V2SF_type_node,
9963 V2SF_type_node, NULL_TREE);
9964
a3170dc6 9965 tree v2sf_ftype_v2si
2abe3e28
AH
9966 = build_function_type_list (opaque_V2SF_type_node,
9967 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
9968
9969 tree v2si_ftype_v2sf
2abe3e28
AH
9970 = build_function_type_list (opaque_V2SI_type_node,
9971 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
9972
9973 tree v2si_ftype_v2si_char
2abe3e28
AH
9974 = build_function_type_list (opaque_V2SI_type_node,
9975 opaque_V2SI_type_node,
9976 char_type_node, NULL_TREE);
a3170dc6
AH
9977
9978 tree v2si_ftype_int_char
2abe3e28 9979 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9980 integer_type_node, char_type_node, NULL_TREE);
9981
9982 tree v2si_ftype_char
2abe3e28
AH
9983 = build_function_type_list (opaque_V2SI_type_node,
9984 char_type_node, NULL_TREE);
a3170dc6
AH
9985
9986 tree int_ftype_int_int
9987 = build_function_type_list (integer_type_node,
9988 integer_type_node, integer_type_node,
9989 NULL_TREE);
95385cbb 9990
58646b77
PB
9991 tree opaque_ftype_opaque_opaque
9992 = build_function_type_list (opaque_V4SI_type_node,
9993 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 9994 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
9995 = build_function_type_list (V4SI_type_node,
9996 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 9997 tree v4sf_ftype_v4si_int
b4de2f7d 9998 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
9999 V4SI_type_node, integer_type_node, NULL_TREE);
10000 tree v4si_ftype_v4sf_int
b4de2f7d 10001 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10002 V4SF_type_node, integer_type_node, NULL_TREE);
10003 tree v4si_ftype_v4si_int
b4de2f7d 10004 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10005 V4SI_type_node, integer_type_node, NULL_TREE);
10006 tree v8hi_ftype_v8hi_int
b4de2f7d 10007 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
10008 V8HI_type_node, integer_type_node, NULL_TREE);
10009 tree v16qi_ftype_v16qi_int
b4de2f7d 10010 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
10011 V16QI_type_node, integer_type_node, NULL_TREE);
10012 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
10013 = build_function_type_list (V16QI_type_node,
10014 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
10015 integer_type_node, NULL_TREE);
10016 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
10017 = build_function_type_list (V8HI_type_node,
10018 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
10019 integer_type_node, NULL_TREE);
10020 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
10021 = build_function_type_list (V4SI_type_node,
10022 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
10023 integer_type_node, NULL_TREE);
10024 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
10025 = build_function_type_list (V4SF_type_node,
10026 V4SF_type_node, V4SF_type_node,
b9e4e5d1 10027 integer_type_node, NULL_TREE);
0ac081f6 10028 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
10029 = build_function_type_list (V4SF_type_node,
10030 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
10031 tree opaque_ftype_opaque_opaque_opaque
10032 = build_function_type_list (opaque_V4SI_type_node,
10033 opaque_V4SI_type_node, opaque_V4SI_type_node,
10034 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 10035 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
10036 = build_function_type_list (V4SF_type_node,
10037 V4SF_type_node, V4SF_type_node,
10038 V4SI_type_node, NULL_TREE);
2212663f 10039 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
10040 = build_function_type_list (V4SF_type_node,
10041 V4SF_type_node, V4SF_type_node,
10042 V4SF_type_node, NULL_TREE);
f676971a 10043 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
10044 = build_function_type_list (V4SI_type_node,
10045 V4SI_type_node, V4SI_type_node,
10046 V4SI_type_node, NULL_TREE);
0ac081f6 10047 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
10048 = build_function_type_list (V8HI_type_node,
10049 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 10050 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
10051 = build_function_type_list (V8HI_type_node,
10052 V8HI_type_node, V8HI_type_node,
10053 V8HI_type_node, NULL_TREE);
c4ad648e 10054 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
10055 = build_function_type_list (V4SI_type_node,
10056 V8HI_type_node, V8HI_type_node,
10057 V4SI_type_node, NULL_TREE);
c4ad648e 10058 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
10059 = build_function_type_list (V4SI_type_node,
10060 V16QI_type_node, V16QI_type_node,
10061 V4SI_type_node, NULL_TREE);
0ac081f6 10062 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
10063 = build_function_type_list (V16QI_type_node,
10064 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10065 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
10066 = build_function_type_list (V4SI_type_node,
10067 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 10068 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
10069 = build_function_type_list (V8HI_type_node,
10070 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10071 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
10072 = build_function_type_list (V4SI_type_node,
10073 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10074 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
10075 = build_function_type_list (V8HI_type_node,
10076 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 10077 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
10078 = build_function_type_list (V16QI_type_node,
10079 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10080 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
10081 = build_function_type_list (V4SI_type_node,
10082 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 10083 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
10084 = build_function_type_list (V4SI_type_node,
10085 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10086 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
10087 = build_function_type_list (V4SI_type_node,
10088 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
10089 tree v4si_ftype_v8hi
10090 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
10091 tree int_ftype_v4si_v4si
10092 = build_function_type_list (integer_type_node,
10093 V4SI_type_node, V4SI_type_node, NULL_TREE);
10094 tree int_ftype_v4sf_v4sf
10095 = build_function_type_list (integer_type_node,
10096 V4SF_type_node, V4SF_type_node, NULL_TREE);
10097 tree int_ftype_v16qi_v16qi
10098 = build_function_type_list (integer_type_node,
10099 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10100 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
10101 = build_function_type_list (integer_type_node,
10102 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10103
6f317ef3 10104 /* Add the simple ternary operators. */
586de218 10105 d = bdesc_3arg;
ca7558fc 10106 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 10107 {
2212663f
DB
10108 enum machine_mode mode0, mode1, mode2, mode3;
10109 tree type;
58646b77
PB
10110 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10111 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 10112
58646b77
PB
10113 if (is_overloaded)
10114 {
10115 mode0 = VOIDmode;
10116 mode1 = VOIDmode;
10117 mode2 = VOIDmode;
10118 mode3 = VOIDmode;
10119 }
10120 else
10121 {
10122 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10123 continue;
f676971a 10124
58646b77
PB
10125 mode0 = insn_data[d->icode].operand[0].mode;
10126 mode1 = insn_data[d->icode].operand[1].mode;
10127 mode2 = insn_data[d->icode].operand[2].mode;
10128 mode3 = insn_data[d->icode].operand[3].mode;
10129 }
bb8df8a6 10130
2212663f
DB
10131 /* When all four are of the same mode. */
10132 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
10133 {
10134 switch (mode0)
10135 {
58646b77
PB
10136 case VOIDmode:
10137 type = opaque_ftype_opaque_opaque_opaque;
10138 break;
617e0e1d
DB
10139 case V4SImode:
10140 type = v4si_ftype_v4si_v4si_v4si;
10141 break;
2212663f
DB
10142 case V4SFmode:
10143 type = v4sf_ftype_v4sf_v4sf_v4sf;
10144 break;
10145 case V8HImode:
10146 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 10147 break;
2212663f
DB
10148 case V16QImode:
10149 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10150 break;
96038623
DE
10151 case V2SFmode:
10152 type = v2sf_ftype_v2sf_v2sf_v2sf;
10153 break;
2212663f 10154 default:
37409796 10155 gcc_unreachable ();
2212663f
DB
10156 }
10157 }
10158 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 10159 {
2212663f
DB
10160 switch (mode0)
10161 {
10162 case V4SImode:
10163 type = v4si_ftype_v4si_v4si_v16qi;
10164 break;
10165 case V4SFmode:
10166 type = v4sf_ftype_v4sf_v4sf_v16qi;
10167 break;
10168 case V8HImode:
10169 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 10170 break;
2212663f
DB
10171 case V16QImode:
10172 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10173 break;
2212663f 10174 default:
37409796 10175 gcc_unreachable ();
2212663f
DB
10176 }
10177 }
f676971a 10178 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 10179 && mode3 == V4SImode)
24408032 10180 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 10181 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 10182 && mode3 == V4SImode)
24408032 10183 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 10184 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 10185 && mode3 == V4SImode)
24408032
AH
10186 type = v4sf_ftype_v4sf_v4sf_v4si;
10187
a7b376ee 10188 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
10189 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10190 && mode3 == QImode)
b9e4e5d1 10191 type = v16qi_ftype_v16qi_v16qi_int;
24408032 10192
a7b376ee 10193 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
10194 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10195 && mode3 == QImode)
b9e4e5d1 10196 type = v8hi_ftype_v8hi_v8hi_int;
24408032 10197
a7b376ee 10198 /* vint, vint, vint, 4-bit literal. */
24408032
AH
10199 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10200 && mode3 == QImode)
b9e4e5d1 10201 type = v4si_ftype_v4si_v4si_int;
24408032 10202
a7b376ee 10203 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
10204 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10205 && mode3 == QImode)
b9e4e5d1 10206 type = v4sf_ftype_v4sf_v4sf_int;
24408032 10207
2212663f 10208 else
37409796 10209 gcc_unreachable ();
2212663f
DB
10210
10211 def_builtin (d->mask, d->name, type, d->code);
10212 }
10213
0ac081f6 10214 /* Add the simple binary operators. */
00b960c7 10215 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 10216 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
10217 {
10218 enum machine_mode mode0, mode1, mode2;
10219 tree type;
58646b77
PB
10220 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10221 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 10222
58646b77
PB
10223 if (is_overloaded)
10224 {
10225 mode0 = VOIDmode;
10226 mode1 = VOIDmode;
10227 mode2 = VOIDmode;
10228 }
10229 else
bb8df8a6 10230 {
58646b77
PB
10231 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10232 continue;
f676971a 10233
58646b77
PB
10234 mode0 = insn_data[d->icode].operand[0].mode;
10235 mode1 = insn_data[d->icode].operand[1].mode;
10236 mode2 = insn_data[d->icode].operand[2].mode;
10237 }
0ac081f6
AH
10238
10239 /* When all three operands are of the same mode. */
10240 if (mode0 == mode1 && mode1 == mode2)
10241 {
10242 switch (mode0)
10243 {
58646b77
PB
10244 case VOIDmode:
10245 type = opaque_ftype_opaque_opaque;
10246 break;
0ac081f6
AH
10247 case V4SFmode:
10248 type = v4sf_ftype_v4sf_v4sf;
10249 break;
10250 case V4SImode:
10251 type = v4si_ftype_v4si_v4si;
10252 break;
10253 case V16QImode:
10254 type = v16qi_ftype_v16qi_v16qi;
10255 break;
10256 case V8HImode:
10257 type = v8hi_ftype_v8hi_v8hi;
10258 break;
a3170dc6
AH
10259 case V2SImode:
10260 type = v2si_ftype_v2si_v2si;
10261 break;
96038623
DE
10262 case V2SFmode:
10263 if (TARGET_PAIRED_FLOAT)
10264 type = v2sf_ftype_v2sf_v2sf;
10265 else
10266 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
10267 break;
10268 case SImode:
10269 type = int_ftype_int_int;
10270 break;
0ac081f6 10271 default:
37409796 10272 gcc_unreachable ();
0ac081f6
AH
10273 }
10274 }
10275
10276 /* A few other combos we really don't want to do manually. */
10277
10278 /* vint, vfloat, vfloat. */
10279 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10280 type = v4si_ftype_v4sf_v4sf;
10281
10282 /* vshort, vchar, vchar. */
10283 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10284 type = v8hi_ftype_v16qi_v16qi;
10285
10286 /* vint, vshort, vshort. */
10287 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10288 type = v4si_ftype_v8hi_v8hi;
10289
10290 /* vshort, vint, vint. */
10291 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10292 type = v8hi_ftype_v4si_v4si;
10293
10294 /* vchar, vshort, vshort. */
10295 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10296 type = v16qi_ftype_v8hi_v8hi;
10297
10298 /* vint, vchar, vint. */
10299 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10300 type = v4si_ftype_v16qi_v4si;
10301
fa066a23
AH
10302 /* vint, vchar, vchar. */
10303 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10304 type = v4si_ftype_v16qi_v16qi;
10305
0ac081f6
AH
10306 /* vint, vshort, vint. */
10307 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10308 type = v4si_ftype_v8hi_v4si;
f676971a 10309
a7b376ee 10310 /* vint, vint, 5-bit literal. */
2212663f 10311 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10312 type = v4si_ftype_v4si_int;
f676971a 10313
a7b376ee 10314 /* vshort, vshort, 5-bit literal. */
2212663f 10315 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 10316 type = v8hi_ftype_v8hi_int;
f676971a 10317
a7b376ee 10318 /* vchar, vchar, 5-bit literal. */
2212663f 10319 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 10320 type = v16qi_ftype_v16qi_int;
0ac081f6 10321
a7b376ee 10322 /* vfloat, vint, 5-bit literal. */
617e0e1d 10323 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10324 type = v4sf_ftype_v4si_int;
f676971a 10325
a7b376ee 10326 /* vint, vfloat, 5-bit literal. */
617e0e1d 10327 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 10328 type = v4si_ftype_v4sf_int;
617e0e1d 10329
a3170dc6
AH
10330 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10331 type = v2si_ftype_int_int;
10332
10333 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10334 type = v2si_ftype_v2si_char;
10335
10336 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10337 type = v2si_ftype_int_char;
10338
37409796 10339 else
0ac081f6 10340 {
37409796
NS
10341 /* int, x, x. */
10342 gcc_assert (mode0 == SImode);
0ac081f6
AH
10343 switch (mode1)
10344 {
10345 case V4SImode:
10346 type = int_ftype_v4si_v4si;
10347 break;
10348 case V4SFmode:
10349 type = int_ftype_v4sf_v4sf;
10350 break;
10351 case V16QImode:
10352 type = int_ftype_v16qi_v16qi;
10353 break;
10354 case V8HImode:
10355 type = int_ftype_v8hi_v8hi;
10356 break;
10357 default:
37409796 10358 gcc_unreachable ();
0ac081f6
AH
10359 }
10360 }
10361
2212663f
DB
10362 def_builtin (d->mask, d->name, type, d->code);
10363 }
24408032 10364
2212663f
DB
10365 /* Add the simple unary operators. */
10366 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10367 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10368 {
10369 enum machine_mode mode0, mode1;
10370 tree type;
58646b77
PB
10371 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10372 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10373
10374 if (is_overloaded)
10375 {
10376 mode0 = VOIDmode;
10377 mode1 = VOIDmode;
10378 }
10379 else
10380 {
10381 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10382 continue;
bb8df8a6 10383
58646b77
PB
10384 mode0 = insn_data[d->icode].operand[0].mode;
10385 mode1 = insn_data[d->icode].operand[1].mode;
10386 }
2212663f
DB
10387
10388 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10389 type = v4si_ftype_int;
2212663f 10390 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10391 type = v8hi_ftype_int;
2212663f 10392 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10393 type = v16qi_ftype_int;
58646b77
PB
10394 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10395 type = opaque_ftype_opaque;
617e0e1d
DB
10396 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10397 type = v4sf_ftype_v4sf;
20e26713
AH
10398 else if (mode0 == V8HImode && mode1 == V16QImode)
10399 type = v8hi_ftype_v16qi;
10400 else if (mode0 == V4SImode && mode1 == V8HImode)
10401 type = v4si_ftype_v8hi;
a3170dc6
AH
10402 else if (mode0 == V2SImode && mode1 == V2SImode)
10403 type = v2si_ftype_v2si;
10404 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10405 {
10406 if (TARGET_PAIRED_FLOAT)
10407 type = v2sf_ftype_v2sf;
10408 else
10409 type = v2sf_ftype_v2sf_spe;
10410 }
a3170dc6
AH
10411 else if (mode0 == V2SFmode && mode1 == V2SImode)
10412 type = v2sf_ftype_v2si;
10413 else if (mode0 == V2SImode && mode1 == V2SFmode)
10414 type = v2si_ftype_v2sf;
10415 else if (mode0 == V2SImode && mode1 == QImode)
10416 type = v2si_ftype_char;
2212663f 10417 else
37409796 10418 gcc_unreachable ();
2212663f 10419
0ac081f6
AH
10420 def_builtin (d->mask, d->name, type, d->code);
10421 }
10422}
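/* Illustrative aside, not part of rs6000.c: the three loops above derive each
   builtin's prototype purely from the modes of its insn operands.  The
   host-side sketch below mirrors that dispatch for the binary-operator table,
   using plain strings instead of enum machine_mode; the function name
   pick_binary_prototype is hypothetical.  */

#include <stdio.h>
#include <string.h>

static const char *
pick_binary_prototype (const char *m0, const char *m1, const char *m2)
{
  /* Result and both operands in the same vector mode.  */
  if (!strcmp (m0, m1) && !strcmp (m1, m2))
    {
      if (!strcmp (m0, "V4SF"))  return "v4sf_ftype_v4sf_v4sf";
      if (!strcmp (m0, "V4SI"))  return "v4si_ftype_v4si_v4si";
      if (!strcmp (m0, "V8HI"))  return "v8hi_ftype_v8hi_v8hi";
      if (!strcmp (m0, "V16QI")) return "v16qi_ftype_v16qi_v16qi";
    }
  /* One of the mixed-mode combinations, e.g. a vector-float compare
     producing a vector-int result.  */
  if (!strcmp (m0, "V4SI") && !strcmp (m1, "V4SF") && !strcmp (m2, "V4SF"))
    return "v4si_ftype_v4sf_v4sf";
  return "unhandled";
}

int
main (void)
{
  printf ("%s\n", pick_binary_prototype ("V8HI", "V8HI", "V8HI"));
  printf ("%s\n", pick_binary_prototype ("V4SI", "V4SF", "V4SF"));
  return 0;
}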
10423
c15c90bb
ZW
10424static void
10425rs6000_init_libfuncs (void)
10426{
602ea4d3
JJ
10427 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10428 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10429 {
602ea4d3
JJ
10430 /* AIX library routines for float->int conversion. */
10431 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10432 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10433 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10434 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10435 }
c15c90bb 10436
602ea4d3 10437 if (!TARGET_IEEEQUAD)
98c41d98 10438 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
10439 if (!TARGET_XL_COMPAT)
10440 {
10441 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10442 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10443 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10444 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 10445
17caeff2 10446 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10447 {
10448 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10449 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10450 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10451 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10452 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10453 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10454 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10455
10456 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10457 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10458 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10459 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10460 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10461 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10462 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10463 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10464 }
b26941b4
JM
10465
10466 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10467 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10468 }
10469 else
10470 {
10471 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10472 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10473 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10474 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10475 }
c9034561 10476 else
c15c90bb 10477 {
c9034561 10478 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10479
10480 set_optab_libfunc (add_optab, TFmode, "_q_add");
10481 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10482 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10483 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10484 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10485 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10486 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10487
c9034561
ZW
10488 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10489 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10490 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10491 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10492 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10493 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10494
85363ca0
ZW
10495 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10496 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10497 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10498 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10499 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10500 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10501 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10502 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10503 }
10504}
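/* Illustrative aside, not part of rs6000.c: on a target that takes the
   !TARGET_IEEEQUAD && !TARGET_XL_COMPAT path above (128-bit IBM long double,
   as on AIX, Darwin or 64-bit Linux defaults), a plain long double operation
   is lowered to one of the libcalls registered here, so the addition below
   should end up as a call to __gcc_qadd rather than inline FP code
   (_xlqadd under -mxl-compat, _q_add for 32-bit SVR4).  The function name
   ld_add is just for the example.  */

long double
ld_add (long double a, long double b)
{
  return a + b;
}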
fba73eb1
DE
10505
10506\f
10507/* Expand a block clear operation, and return 1 if successful. Return 0
10508 if we should let the compiler generate normal code.
10509
10510 operands[0] is the destination
10511 operands[1] is the length
57e84f18 10512 operands[3] is the alignment */
fba73eb1
DE
10513
10514int
10515expand_block_clear (rtx operands[])
10516{
10517 rtx orig_dest = operands[0];
10518 rtx bytes_rtx = operands[1];
57e84f18 10519 rtx align_rtx = operands[3];
5514620a
GK
10520 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10521 HOST_WIDE_INT align;
10522 HOST_WIDE_INT bytes;
fba73eb1
DE
10523 int offset;
10524 int clear_bytes;
5514620a 10525 int clear_step;
fba73eb1
DE
10526
10527 /* If this is not a fixed size clear, just call memset.  */
10528 if (! constp)
10529 return 0;
10530
37409796
NS
10531 /* This must be a fixed size alignment */
10532 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10533 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10534
10535 /* Anything to clear? */
10536 bytes = INTVAL (bytes_rtx);
10537 if (bytes <= 0)
10538 return 1;
10539
5514620a
GK
10540 /* Use the builtin memset after a point, to avoid huge code bloat.
10541 When optimize_size, avoid any significant code bloat; calling
10542 memset is about 4 instructions, so allow for one instruction to
10543 load zero and three to do clearing. */
10544 if (TARGET_ALTIVEC && align >= 128)
10545 clear_step = 16;
10546 else if (TARGET_POWERPC64 && align >= 32)
10547 clear_step = 8;
21d818ff
NF
10548 else if (TARGET_SPE && align >= 64)
10549 clear_step = 8;
5514620a
GK
10550 else
10551 clear_step = 4;
fba73eb1 10552
5514620a
GK
10553 if (optimize_size && bytes > 3 * clear_step)
10554 return 0;
10555 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10556 return 0;
10557
10558 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10559 {
fba73eb1
DE
10560 enum machine_mode mode = BLKmode;
10561 rtx dest;
f676971a 10562
5514620a
GK
10563 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10564 {
10565 clear_bytes = 16;
10566 mode = V4SImode;
10567 }
21d818ff
NF
10568 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10569 {
10570 clear_bytes = 8;
10571 mode = V2SImode;
10572 }
5514620a 10573 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10574 /* 64-bit loads and stores require word-aligned
10575 displacements. */
10576 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10577 {
10578 clear_bytes = 8;
10579 mode = DImode;
fba73eb1 10580 }
5514620a 10581 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
10582 { /* move 4 bytes */
10583 clear_bytes = 4;
10584 mode = SImode;
fba73eb1 10585 }
ec53fc93 10586 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
10587 { /* move 2 bytes */
10588 clear_bytes = 2;
10589 mode = HImode;
fba73eb1
DE
10590 }
10591 else /* move 1 byte at a time */
10592 {
10593 clear_bytes = 1;
10594 mode = QImode;
fba73eb1 10595 }
f676971a 10596
fba73eb1 10597 dest = adjust_address (orig_dest, mode, offset);
f676971a 10598
5514620a 10599 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10600 }
10601
10602 return 1;
10603}
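/* Illustrative aside, not part of rs6000.c: a host-side sketch of the
   chunk-size selection made by the loop above, for a 32-bit target without
   AltiVec or SPE and with strict alignment.  show_clear_chunks is a
   hypothetical name; align is in bits, as in expand_block_clear.  */

#include <stdio.h>

static void
show_clear_chunks (int bytes, int align)
{
  while (bytes > 0)
    {
      int clear_bytes;
      if (bytes >= 4 && align >= 32)
        clear_bytes = 4;            /* one word store of zero */
      else if (bytes >= 2 && align >= 16)
        clear_bytes = 2;            /* one halfword store of zero */
      else
        clear_bytes = 1;            /* one byte store of zero */
      printf ("store %d zero byte(s)\n", clear_bytes);
      bytes -= clear_bytes;
    }
}

int
main (void)
{
  /* 22 bytes at word alignment: five word stores, then one halfword store.  */
  show_clear_chunks (22, 32);
  return 0;
}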
10604
35aff10b 10605\f
7e69e155
MM
10606/* Expand a block move operation, and return 1 if successful. Return 0
10607 if we should let the compiler generate normal code.
10608
10609 operands[0] is the destination
10610 operands[1] is the source
10611 operands[2] is the length
10612 operands[3] is the alignment */
10613
3933e0e1
MM
10614#define MAX_MOVE_REG 4
10615
7e69e155 10616int
a2369ed3 10617expand_block_move (rtx operands[])
7e69e155 10618{
b6c9286a
MM
10619 rtx orig_dest = operands[0];
10620 rtx orig_src = operands[1];
7e69e155 10621 rtx bytes_rtx = operands[2];
7e69e155 10622 rtx align_rtx = operands[3];
3933e0e1 10623 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10624 int align;
3933e0e1
MM
10625 int bytes;
10626 int offset;
7e69e155 10627 int move_bytes;
cabfd258
GK
10628 rtx stores[MAX_MOVE_REG];
10629 int num_reg = 0;
7e69e155 10630
3933e0e1 10631 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 10632 if (! constp)
3933e0e1
MM
10633 return 0;
10634
37409796
NS
10635 /* This must be a fixed size alignment */
10636 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10637 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10638
7e69e155 10639 /* Anything to move? */
3933e0e1
MM
10640 bytes = INTVAL (bytes_rtx);
10641 if (bytes <= 0)
7e69e155
MM
10642 return 1;
10643
ea9982a8 10644 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10645 reg_parm_stack_space. */
ea9982a8 10646 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10647 return 0;
10648
cabfd258 10649 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10650 {
cabfd258 10651 union {
70128ad9 10652 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10653 rtx (*mov) (rtx, rtx);
cabfd258
GK
10654 } gen_func;
10655 enum machine_mode mode = BLKmode;
10656 rtx src, dest;
f676971a 10657
5514620a
GK
10658 /* Altivec first, since it will be faster than a string move
10659 when it applies, and usually not significantly larger. */
10660 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10661 {
10662 move_bytes = 16;
10663 mode = V4SImode;
10664 gen_func.mov = gen_movv4si;
10665 }
21d818ff
NF
10666 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10667 {
10668 move_bytes = 8;
10669 mode = V2SImode;
10670 gen_func.mov = gen_movv2si;
10671 }
5514620a 10672 else if (TARGET_STRING
cabfd258
GK
10673 && bytes > 24 /* move up to 32 bytes at a time */
10674 && ! fixed_regs[5]
10675 && ! fixed_regs[6]
10676 && ! fixed_regs[7]
10677 && ! fixed_regs[8]
10678 && ! fixed_regs[9]
10679 && ! fixed_regs[10]
10680 && ! fixed_regs[11]
10681 && ! fixed_regs[12])
7e69e155 10682 {
cabfd258 10683 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 10684 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
10685 }
10686 else if (TARGET_STRING
10687 && bytes > 16 /* move up to 24 bytes at a time */
10688 && ! fixed_regs[5]
10689 && ! fixed_regs[6]
10690 && ! fixed_regs[7]
10691 && ! fixed_regs[8]
10692 && ! fixed_regs[9]
10693 && ! fixed_regs[10])
10694 {
10695 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 10696 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
10697 }
10698 else if (TARGET_STRING
10699 && bytes > 8 /* move up to 16 bytes at a time */
10700 && ! fixed_regs[5]
10701 && ! fixed_regs[6]
10702 && ! fixed_regs[7]
10703 && ! fixed_regs[8])
10704 {
10705 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 10706 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
10707 }
10708 else if (bytes >= 8 && TARGET_POWERPC64
10709 /* 64-bit loads and stores require word-aligned
10710 displacements. */
fba73eb1 10711 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
10712 {
10713 move_bytes = 8;
10714 mode = DImode;
10715 gen_func.mov = gen_movdi;
10716 }
10717 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
10718 { /* move up to 8 bytes at a time */
10719 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 10720 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 10721 }
cd7d9ca4 10722 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
10723 { /* move 4 bytes */
10724 move_bytes = 4;
10725 mode = SImode;
10726 gen_func.mov = gen_movsi;
10727 }
ec53fc93 10728 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
10729 { /* move 2 bytes */
10730 move_bytes = 2;
10731 mode = HImode;
10732 gen_func.mov = gen_movhi;
10733 }
10734 else if (TARGET_STRING && bytes > 1)
10735 { /* move up to 4 bytes at a time */
10736 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 10737 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
10738 }
10739 else /* move 1 byte at a time */
10740 {
10741 move_bytes = 1;
10742 mode = QImode;
10743 gen_func.mov = gen_movqi;
10744 }
f676971a 10745
cabfd258
GK
10746 src = adjust_address (orig_src, mode, offset);
10747 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
10748
10749 if (mode != BLKmode)
cabfd258
GK
10750 {
10751 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 10752
cabfd258
GK
10753 emit_insn ((*gen_func.mov) (tmp_reg, src));
10754 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 10755 }
3933e0e1 10756
cabfd258
GK
10757 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
10758 {
10759 int i;
10760 for (i = 0; i < num_reg; i++)
10761 emit_insn (stores[i]);
10762 num_reg = 0;
10763 }
35aff10b 10764
cabfd258 10765 if (mode == BLKmode)
7e69e155 10766 {
70128ad9 10767 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
10768 patterns require zero offset. */
10769 if (!REG_P (XEXP (src, 0)))
b6c9286a 10770 {
cabfd258
GK
10771 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
10772 src = replace_equiv_address (src, src_reg);
b6c9286a 10773 }
cabfd258 10774 set_mem_size (src, GEN_INT (move_bytes));
f676971a 10775
cabfd258 10776 if (!REG_P (XEXP (dest, 0)))
3933e0e1 10777 {
cabfd258
GK
10778 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
10779 dest = replace_equiv_address (dest, dest_reg);
7e69e155 10780 }
cabfd258 10781 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 10782
70128ad9 10783 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
10784 GEN_INT (move_bytes & 31),
10785 align_rtx));
7e69e155 10786 }
7e69e155
MM
10787 }
10788
10789 return 1;
10790}
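/* Illustrative aside, not part of rs6000.c: the loop above buffers up to
   MAX_MOVE_REG stores so that a run of loads is emitted before the matching
   stores, letting the loads overlap the stores.  The sketch below only shows
   that scheduling pattern; the register names it prints are labels for the
   example, not real register allocation.  */

#include <stdio.h>

#define MAX_MOVE_REG 4

int
main (void)
{
  const int chunks = 6;                 /* e.g. six word-sized pieces */
  char pending[MAX_MOVE_REG][32];
  int num_reg = 0, i, j;

  for (i = 0; i < chunks; i++)
    {
      printf ("load  r%d, src+%d\n", num_reg, 4 * i);
      snprintf (pending[num_reg], sizeof pending[num_reg],
                "store r%d, dst+%d", num_reg, 4 * i);
      num_reg++;
      /* Flush the buffered stores when the buffer is full or we are done.  */
      if (num_reg >= MAX_MOVE_REG || i == chunks - 1)
        {
          for (j = 0; j < num_reg; j++)
            printf ("%s\n", pending[j]);
          num_reg = 0;
        }
    }
  return 0;
}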
10791
d62294f5 10792\f
9caa3eb2
DE
10793/* Return a string to perform a load_multiple operation.
10794 operands[0] is the vector.
10795 operands[1] is the source address.
10796 operands[2] is the first destination register. */
10797
10798const char *
a2369ed3 10799rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
10800{
10801 /* We have to handle the case where the pseudo used to contain the address
10802 is assigned to one of the output registers. */
10803 int i, j;
10804 int words = XVECLEN (operands[0], 0);
10805 rtx xop[10];
10806
10807 if (XVECLEN (operands[0], 0) == 1)
10808 return "{l|lwz} %2,0(%1)";
10809
10810 for (i = 0; i < words; i++)
10811 if (refers_to_regno_p (REGNO (operands[2]) + i,
10812 REGNO (operands[2]) + i + 1, operands[1], 0))
10813 {
10814 if (i == words-1)
10815 {
10816 xop[0] = GEN_INT (4 * (words-1));
10817 xop[1] = operands[1];
10818 xop[2] = operands[2];
10819 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
10820 return "";
10821 }
10822 else if (i == 0)
10823 {
10824 xop[0] = GEN_INT (4 * (words-1));
10825 xop[1] = operands[1];
10826 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
10827 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
10828 return "";
10829 }
10830 else
10831 {
10832 for (j = 0; j < words; j++)
10833 if (j != i)
10834 {
10835 xop[0] = GEN_INT (j * 4);
10836 xop[1] = operands[1];
10837 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
10838 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
10839 }
10840 xop[0] = GEN_INT (i * 4);
10841 xop[1] = operands[1];
10842 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
10843 return "";
10844 }
10845 }
10846
10847 return "{lsi|lswi} %2,%1,%N0";
10848}
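/* Illustrative aside, not part of rs6000.c: the function above has to cope
   with the base-address register also being one of the loaded registers.
   The sketch below only classifies which of the cases applies; the strings
   paraphrase the strategies, and classify is a hypothetical name.  See the
   code above for the exact asm templates.  */

#include <stdio.h>

static const char *
classify (int addr_reg, int r_first, int words)
{
  int i;
  for (i = 0; i < words; i++)
    if (addr_reg == r_first + i)
      {
        if (i == words - 1)
          return "overlap in the last word: reload the base register last";
        if (i == 0)
          return "overlap in the first word: bump the base, then reload it";
        return "overlap in the middle: load the words one at a time";
      }
  return "no overlap: a single lswi will do";
}

int
main (void)
{
  printf ("%s\n", classify (5, 5, 4));   /* base reg is the first dest  */
  printf ("%s\n", classify (8, 5, 4));   /* base reg is the last dest   */
  printf ("%s\n", classify (6, 5, 4));   /* base reg is in the middle   */
  printf ("%s\n", classify (12, 5, 4));  /* no overlap                  */
  return 0;
}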
10849
9878760c 10850\f
a4f6c312
SS
10851/* A validation routine: say whether CODE, a condition code, and MODE
10852 match. The other alternatives either don't make sense or should
10853 never be generated. */
39a10a29 10854
48d72335 10855void
a2369ed3 10856validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 10857{
37409796
NS
10858 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
10859 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
10860 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
10861
10862 /* These don't make sense. */
37409796
NS
10863 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
10864 || mode != CCUNSmode);
39a10a29 10865
37409796
NS
10866 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
10867 || mode == CCUNSmode);
39a10a29 10868
37409796
NS
10869 gcc_assert (mode == CCFPmode
10870 || (code != ORDERED && code != UNORDERED
10871 && code != UNEQ && code != LTGT
10872 && code != UNGT && code != UNLT
10873 && code != UNGE && code != UNLE));
f676971a
EC
10874
10875 /* These should never be generated except for
bc9ec0e0 10876 flag_finite_math_only. */
37409796
NS
10877 gcc_assert (mode != CCFPmode
10878 || flag_finite_math_only
10879 || (code != LE && code != GE
10880 && code != UNEQ && code != LTGT
10881 && code != UNGT && code != UNLT));
39a10a29
GK
10882
10883 /* These are invalid; the information is not there. */
37409796 10884 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
10885}
10886
9878760c
RK
10887\f
10888 /* Return 1 if ANDOP is a mask with no bits set outside the mask implied by
10889    converting the result of a rotate insn into a shift-left insn of SHIFTOP
10890    bits.  Both are known to be SImode CONST_INT. */
9878760c
RK
10891
10892int
a2369ed3 10893includes_lshift_p (rtx shiftop, rtx andop)
9878760c 10894{
e2c953b6
DE
10895 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
10896
10897 shift_mask <<= INTVAL (shiftop);
9878760c 10898
b1765bde 10899 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
10900}
10901
10902/* Similar, but for right shift. */
10903
10904int
a2369ed3 10905includes_rshift_p (rtx shiftop, rtx andop)
9878760c 10906{
a7653a2c 10907 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
10908
10909 shift_mask >>= INTVAL (shiftop);
10910
b1765bde 10911 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
10912}
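/* Illustrative aside, not part of rs6000.c: the mask tests used by
   includes_lshift_p and includes_rshift_p above, redone on plain host
   integers.  A rotate-and-AND can become a plain shift when the AND mask
   keeps no bits that the shift would not already clear.  lshift_ok and
   rshift_ok are hypothetical names.  */

#include <stdio.h>

static int
lshift_ok (int shift, unsigned int mask)
{
  unsigned int shift_mask = ~0u << shift;
  return (mask & ~shift_mask) == 0;
}

static int
rshift_ok (int shift, unsigned int mask)
{
  unsigned int shift_mask = ~0u >> shift;
  return (mask & ~shift_mask) == 0;
}

int
main (void)
{
  /* A rotate left by 4 masked with 0xfffffff0 is just x << 4 ...  */
  printf ("%d\n", lshift_ok (4, 0xfffffff0u));   /* prints 1 */
  /* ... but masked with 0xff it is not, since wrapped bits survive.  */
  printf ("%d\n", lshift_ok (4, 0xffu));         /* prints 0 */
  /* Masking off the top 8 bits after the rotate gives a logical x >> 8.  */
  printf ("%d\n", rshift_ok (8, 0x00ffffffu));   /* prints 1 */
  return 0;
}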
10913
c5059423
AM
10914/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
10915 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 10916 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
10917
10918int
a2369ed3 10919includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 10920{
c5059423
AM
10921 if (GET_CODE (andop) == CONST_INT)
10922 {
02071907 10923 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 10924
c5059423 10925 c = INTVAL (andop);
02071907 10926 if (c == 0 || c == ~0)
c5059423 10927 return 0;
e2c953b6 10928
02071907 10929 shift_mask = ~0;
c5059423
AM
10930 shift_mask <<= INTVAL (shiftop);
10931
b6d08ca1 10932 /* Find the least significant one bit. */
c5059423
AM
10933 lsb = c & -c;
10934
10935 /* It must coincide with the LSB of the shift mask. */
10936 if (-lsb != shift_mask)
10937 return 0;
e2c953b6 10938
c5059423
AM
10939 /* Invert to look for the next transition (if any). */
10940 c = ~c;
10941
10942 /* Remove the low group of ones (originally low group of zeros). */
10943 c &= -lsb;
10944
10945 /* Again find the lsb, and check we have all 1's above. */
10946 lsb = c & -c;
10947 return c == -lsb;
10948 }
10949 else if (GET_CODE (andop) == CONST_DOUBLE
10950 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10951 {
02071907
AM
10952 HOST_WIDE_INT low, high, lsb;
10953 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
10954
10955 low = CONST_DOUBLE_LOW (andop);
10956 if (HOST_BITS_PER_WIDE_INT < 64)
10957 high = CONST_DOUBLE_HIGH (andop);
10958
10959 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 10960 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
10961 return 0;
10962
10963 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10964 {
02071907 10965 shift_mask_high = ~0;
c5059423
AM
10966 if (INTVAL (shiftop) > 32)
10967 shift_mask_high <<= INTVAL (shiftop) - 32;
10968
10969 lsb = high & -high;
10970
10971 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
10972 return 0;
10973
10974 high = ~high;
10975 high &= -lsb;
10976
10977 lsb = high & -high;
10978 return high == -lsb;
10979 }
10980
02071907 10981 shift_mask_low = ~0;
c5059423
AM
10982 shift_mask_low <<= INTVAL (shiftop);
10983
10984 lsb = low & -low;
10985
10986 if (-lsb != shift_mask_low)
10987 return 0;
10988
10989 if (HOST_BITS_PER_WIDE_INT < 64)
10990 high = ~high;
10991 low = ~low;
10992 low &= -lsb;
10993
10994 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10995 {
10996 lsb = high & -high;
10997 return high == -lsb;
10998 }
10999
11000 lsb = low & -low;
11001 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
11002 }
11003 else
11004 return 0;
11005}
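/* Illustrative aside, not part of rs6000.c: the transition-counting trick
   (lsb = c & -c) used by includes_rldic_lshift_p, rewritten on a 64-bit host
   integer for the CONST_INT path only.  An rldic mask must be SHIFTOP zeros,
   then a single block of ones, then zeros, counting up from the LSB.
   rldic_mask_ok is a hypothetical name.  */

#include <stdio.h>

static int
rldic_mask_ok (int shift, unsigned long long c)
{
  unsigned long long shift_mask = ~0ULL << shift;
  unsigned long long lsb;

  if (c == 0 || c == ~0ULL)
    return 0;
  lsb = c & -c;                 /* lowest set bit of the mask */
  if (-lsb != shift_mask)
    return 0;                   /* the ones must start exactly at bit SHIFT */
  c = ~c;                       /* invert to find the next transition */
  c &= -lsb;                    /* drop the low zeros already checked */
  lsb = c & -c;
  return c == -lsb;             /* nothing but zeros above the ones */
}

int
main (void)
{
  printf ("%d\n", rldic_mask_ok (4, 0x0000000000000ff0ULL));  /* prints 1 */
  printf ("%d\n", rldic_mask_ok (4, 0x0000000000000f0fULL));  /* prints 0 */
  return 0;
}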
e2c953b6 11006
c5059423
AM
11007/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
11008 to perform a left shift. It must have SHIFTOP or more least
c1207243 11009 significant 0's, with the remainder of the word 1's. */
e2c953b6 11010
c5059423 11011int
a2369ed3 11012includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 11013{
e2c953b6 11014 if (GET_CODE (andop) == CONST_INT)
c5059423 11015 {
02071907 11016 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 11017
02071907 11018 shift_mask = ~0;
c5059423
AM
11019 shift_mask <<= INTVAL (shiftop);
11020 c = INTVAL (andop);
11021
c1207243 11022 /* Find the least significant one bit. */
c5059423
AM
11023 lsb = c & -c;
11024
11025 /* It must be covered by the shift mask.
a4f6c312 11026 This test also rejects c == 0. */
c5059423
AM
11027 if ((lsb & shift_mask) == 0)
11028 return 0;
11029
11030 /* Check we have all 1's above the transition, and reject all 1's. */
11031 return c == -lsb && lsb != 1;
11032 }
11033 else if (GET_CODE (andop) == CONST_DOUBLE
11034 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11035 {
02071907 11036 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
11037
11038 low = CONST_DOUBLE_LOW (andop);
11039
11040 if (HOST_BITS_PER_WIDE_INT < 64)
11041 {
02071907 11042 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
11043
11044 high = CONST_DOUBLE_HIGH (andop);
11045
11046 if (low == 0)
11047 {
02071907 11048 shift_mask_high = ~0;
c5059423
AM
11049 if (INTVAL (shiftop) > 32)
11050 shift_mask_high <<= INTVAL (shiftop) - 32;
11051
11052 lsb = high & -high;
11053
11054 if ((lsb & shift_mask_high) == 0)
11055 return 0;
11056
11057 return high == -lsb;
11058 }
11059 if (high != ~0)
11060 return 0;
11061 }
11062
02071907 11063 shift_mask_low = ~0;
c5059423
AM
11064 shift_mask_low <<= INTVAL (shiftop);
11065
11066 lsb = low & -low;
11067
11068 if ((lsb & shift_mask_low) == 0)
11069 return 0;
11070
11071 return low == -lsb && lsb != 1;
11072 }
e2c953b6 11073 else
c5059423 11074 return 0;
9878760c 11075}
35068b43 11076
11ac38b2
DE
11077 /* Return 1 if the operands will generate valid arguments to an rlwimi
11078    instruction for an insert with right shift in 64-bit mode.  The mask may
11079    not start on the first bit or stop on the last bit because the wrap-around
11080    effects of the instruction do not correspond to the semantics of the RTL insn. */
11081
11082int
11083insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
11084{
429ec7dc
DE
11085 if (INTVAL (startop) > 32
11086 && INTVAL (startop) < 64
11087 && INTVAL (sizeop) > 1
11088 && INTVAL (sizeop) + INTVAL (startop) < 64
11089 && INTVAL (shiftop) > 0
11090 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
11091 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
11092 return 1;
11093
11094 return 0;
11095}
11096
35068b43 11097/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 11098 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
11099
11100int
a2369ed3 11101registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
11102{
11103 /* We might have been passed a SUBREG. */
f676971a 11104 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 11105 return 0;
f676971a 11106
90f81f99
AP
11107 /* We might have been passed non floating point registers. */
11108 if (!FP_REGNO_P (REGNO (reg1))
11109 || !FP_REGNO_P (REGNO (reg2)))
11110 return 0;
35068b43
RK
11111
11112 return (REGNO (reg1) == REGNO (reg2) - 1);
11113}
11114
a4f6c312
SS
11115/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
11116 addr1 and addr2 must be in consecutive memory locations
11117 (addr2 == addr1 + 8). */
35068b43
RK
11118
11119int
90f81f99 11120mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 11121{
90f81f99 11122 rtx addr1, addr2;
bb8df8a6
EC
11123 unsigned int reg1, reg2;
11124 int offset1, offset2;
35068b43 11125
90f81f99
AP
11126 /* The mems cannot be volatile. */
11127 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
11128 return 0;
f676971a 11129
90f81f99
AP
11130 addr1 = XEXP (mem1, 0);
11131 addr2 = XEXP (mem2, 0);
11132
35068b43
RK
11133 /* Extract an offset (if used) from the first addr. */
11134 if (GET_CODE (addr1) == PLUS)
11135 {
11136 /* If not a REG, return zero. */
11137 if (GET_CODE (XEXP (addr1, 0)) != REG)
11138 return 0;
11139 else
11140 {
c4ad648e 11141 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
11142 /* The offset must be constant! */
11143 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
11144 return 0;
11145 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
11146 }
11147 }
11148 else if (GET_CODE (addr1) != REG)
11149 return 0;
11150 else
11151 {
11152 reg1 = REGNO (addr1);
11153 /* This was a simple (mem (reg)) expression. Offset is 0. */
11154 offset1 = 0;
11155 }
11156
bb8df8a6
EC
11157 /* And now for the second addr. */
11158 if (GET_CODE (addr2) == PLUS)
11159 {
11160 /* If not a REG, return zero. */
11161 if (GET_CODE (XEXP (addr2, 0)) != REG)
11162 return 0;
11163 else
11164 {
11165 reg2 = REGNO (XEXP (addr2, 0));
11166 /* The offset must be constant. */
11167 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11168 return 0;
11169 offset2 = INTVAL (XEXP (addr2, 1));
11170 }
11171 }
11172 else if (GET_CODE (addr2) != REG)
35068b43 11173 return 0;
bb8df8a6
EC
11174 else
11175 {
11176 reg2 = REGNO (addr2);
11177 /* This was a simple (mem (reg)) expression. Offset is 0. */
11178 offset2 = 0;
11179 }
35068b43 11180
bb8df8a6
EC
11181 /* Both of these must have the same base register. */
11182 if (reg1 != reg2)
35068b43
RK
11183 return 0;
11184
11185 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 11186 if (offset2 != offset1 + 8)
35068b43
RK
11187 return 0;
11188
11189 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11190 instructions. */
11191 return 1;
11192}
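/* Illustrative aside, not part of rs6000.c: the adjacency test behind
   mems_ok_for_quad_peep.  Two doubleword memory operands can be fused into
   one lfq/stfq only if they use the same base register and the second offset
   is exactly the first plus 8.  struct addr and quad_peep_ok are hypothetical
   names for the example.  */

#include <stdio.h>

struct addr { int base_reg; int offset; };

static int
quad_peep_ok (struct addr a1, struct addr a2)
{
  return a1.base_reg == a2.base_reg && a2.offset == a1.offset + 8;
}

int
main (void)
{
  struct addr lo = { 9, 16 }, hi = { 9, 24 }, bad = { 9, 20 };
  printf ("%d %d\n", quad_peep_ok (lo, hi), quad_peep_ok (lo, bad));  /* 1 0 */
  return 0;
}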
9878760c 11193\f
e41b2a33
PB
11194
11195rtx
11196rs6000_secondary_memory_needed_rtx (enum machine_mode mode)
11197{
11198 static bool eliminated = false;
11199 if (mode != SDmode)
11200 return assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
11201 else
11202 {
11203 rtx mem = cfun->machine->sdmode_stack_slot;
11204 gcc_assert (mem != NULL_RTX);
11205
11206 if (!eliminated)
11207 {
11208 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
11209 cfun->machine->sdmode_stack_slot = mem;
11210 eliminated = true;
11211 }
11212 return mem;
11213 }
11214}
11215
11216static tree
11217rs6000_check_sdmode (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11218{
11219 /* Don't walk into types. */
11220 if (*tp == NULL_TREE || *tp == error_mark_node || TYPE_P (*tp))
11221 {
11222 *walk_subtrees = 0;
11223 return NULL_TREE;
11224 }
11225
11226 switch (TREE_CODE (*tp))
11227 {
11228 case VAR_DECL:
11229 case PARM_DECL:
11230 case FIELD_DECL:
11231 case RESULT_DECL:
11232 case REAL_CST:
11233 if (TYPE_MODE (TREE_TYPE (*tp)) == SDmode)
11234 return *tp;
11235 break;
11236 default:
11237 break;
11238 }
11239
11240 return NULL_TREE;
11241}
11242
11243
11244/* Allocate a 64-bit stack slot to be used for copying SDmode
11245 values through if this function has any SDmode references. */
11246
11247static void
11248rs6000_alloc_sdmode_stack_slot (void)
11249{
11250 tree t;
11251 basic_block bb;
11252 block_stmt_iterator bsi;
11253
11254 gcc_assert (cfun->machine->sdmode_stack_slot == NULL_RTX);
11255
11256 FOR_EACH_BB (bb)
11257 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
11258 {
11259 tree ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
11260 rs6000_check_sdmode, NULL);
11261 if (ret)
11262 {
11263 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11264 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11265 SDmode, 0);
11266 return;
11267 }
11268 }
11269
11270 /* Check for any SDmode parameters of the function. */
11271 for (t = DECL_ARGUMENTS (cfun->decl); t; t = TREE_CHAIN (t))
11272 {
11273 if (TREE_TYPE (t) == error_mark_node)
11274 continue;
11275
11276 if (TYPE_MODE (TREE_TYPE (t)) == SDmode
11277 || TYPE_MODE (DECL_ARG_TYPE (t)) == SDmode)
11278 {
11279 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11280 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11281 SDmode, 0);
11282 return;
11283 }
11284 }
11285}
11286
11287static void
11288rs6000_instantiate_decls (void)
11289{
11290 if (cfun->machine->sdmode_stack_slot != NULL_RTX)
11291 instantiate_decl_rtl (cfun->machine->sdmode_stack_slot);
11292}
11293
9878760c
RK
11294/* Return the register class of a scratch register needed to copy IN into
11295 or out of a register in CLASS in MODE. If it can be done directly,
11296 NO_REGS is returned. */
11297
11298enum reg_class
3c4774e0
R
11299rs6000_secondary_reload_class (enum reg_class class,
11300 enum machine_mode mode ATTRIBUTE_UNUSED,
11301 rtx in)
9878760c 11302{
5accd822 11303 int regno;
9878760c 11304
ab82a49f
AP
11305 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
11306#if TARGET_MACHO
c4ad648e 11307 && MACHOPIC_INDIRECT
ab82a49f 11308#endif
c4ad648e 11309 ))
46fad5b7
DJ
11310 {
11311 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
11312 other than BASE_REGS for TARGET_ELF. So indicate that a
11313 register from BASE_REGS is needed as an intermediate
11314 register.
f676971a 11315
46fad5b7
DJ
11316 On Darwin, pic addresses require a load from memory, which
11317 needs a base register. */
11318 if (class != BASE_REGS
c4ad648e
AM
11319 && (GET_CODE (in) == SYMBOL_REF
11320 || GET_CODE (in) == HIGH
11321 || GET_CODE (in) == LABEL_REF
11322 || GET_CODE (in) == CONST))
11323 return BASE_REGS;
46fad5b7 11324 }
e7b7998a 11325
5accd822
DE
11326 if (GET_CODE (in) == REG)
11327 {
11328 regno = REGNO (in);
11329 if (regno >= FIRST_PSEUDO_REGISTER)
11330 {
11331 regno = true_regnum (in);
11332 if (regno >= FIRST_PSEUDO_REGISTER)
11333 regno = -1;
11334 }
11335 }
11336 else if (GET_CODE (in) == SUBREG)
11337 {
11338 regno = true_regnum (in);
11339 if (regno >= FIRST_PSEUDO_REGISTER)
11340 regno = -1;
11341 }
11342 else
11343 regno = -1;
11344
9878760c
RK
11345 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
11346 into anything. */
11347 if (class == GENERAL_REGS || class == BASE_REGS
11348 || (regno >= 0 && INT_REGNO_P (regno)))
11349 return NO_REGS;
11350
11351 /* Constants, memory, and FP registers can go into FP registers. */
11352 if ((regno == -1 || FP_REGNO_P (regno))
11353 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
e41b2a33 11354 return (mode != SDmode) ? NO_REGS : GENERAL_REGS;
9878760c 11355
0ac081f6
AH
11356 /* Memory, and AltiVec registers can go into AltiVec registers. */
11357 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
11358 && class == ALTIVEC_REGS)
11359 return NO_REGS;
11360
9878760c
RK
11361 /* We can copy among the CR registers. */
11362 if ((class == CR_REGS || class == CR0_REGS)
11363 && regno >= 0 && CR_REGNO_P (regno))
11364 return NO_REGS;
11365
11366 /* Otherwise, we need GENERAL_REGS. */
11367 return GENERAL_REGS;
11368}
11369\f
11370/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 11371 know this is a valid comparison.
9878760c
RK
11372
11373 SCC_P is 1 if this is for an scc. That means that %D will have been
11374 used instead of %C, so the bits will be in different places.
11375
b4ac57ab 11376 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
11377
11378int
a2369ed3 11379ccr_bit (rtx op, int scc_p)
9878760c
RK
11380{
11381 enum rtx_code code = GET_CODE (op);
11382 enum machine_mode cc_mode;
11383 int cc_regnum;
11384 int base_bit;
9ebbca7d 11385 rtx reg;
9878760c 11386
ec8e098d 11387 if (!COMPARISON_P (op))
9878760c
RK
11388 return -1;
11389
9ebbca7d
GK
11390 reg = XEXP (op, 0);
11391
37409796 11392 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
11393
11394 cc_mode = GET_MODE (reg);
11395 cc_regnum = REGNO (reg);
11396 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 11397
39a10a29 11398 validate_condition_mode (code, cc_mode);
c5defebb 11399
b7053a3f
GK
11400 /* When generating a sCOND operation, only positive conditions are
11401 allowed. */
37409796
NS
11402 gcc_assert (!scc_p
11403 || code == EQ || code == GT || code == LT || code == UNORDERED
11404 || code == GTU || code == LTU);
f676971a 11405
9878760c
RK
11406 switch (code)
11407 {
11408 case NE:
11409 return scc_p ? base_bit + 3 : base_bit + 2;
11410 case EQ:
11411 return base_bit + 2;
1c882ea4 11412 case GT: case GTU: case UNLE:
9878760c 11413 return base_bit + 1;
1c882ea4 11414 case LT: case LTU: case UNGE:
9878760c 11415 return base_bit;
1c882ea4
GK
11416 case ORDERED: case UNORDERED:
11417 return base_bit + 3;
9878760c
RK
11418
11419 case GE: case GEU:
39a10a29 11420 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
11421 unordered position. So test that bit. For integer, this is ! LT
11422 unless this is an scc insn. */
39a10a29 11423 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
11424
11425 case LE: case LEU:
39a10a29 11426 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 11427
9878760c 11428 default:
37409796 11429 gcc_unreachable ();
9878760c
RK
11430 }
11431}
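/* Illustrative aside, not part of rs6000.c: the CR bit numbering used by
   ccr_bit above.  Each condition-register field is four bits wide; within a
   field, bit 0 is LT, bit 1 is GT, bit 2 is EQ and bit 3 is SO/UN.  cr_bit is
   a hypothetical name for the example.  */

#include <stdio.h>

static int
cr_bit (int cr_field, int bit_in_field)
{
  return 4 * cr_field + bit_in_field;
}

int
main (void)
{
  printf ("GT of cr6 is CR bit %d\n", cr_bit (6, 1));   /* prints 25 */
  printf ("EQ of cr0 is CR bit %d\n", cr_bit (0, 2));   /* prints 2  */
  return 0;
}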
1ff7789b 11432\f
8d30c4ee 11433/* Return the GOT register. */
1ff7789b 11434
9390387d 11435rtx
a2369ed3 11436rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 11437{
a4f6c312
SS
11438 /* The second flow pass currently (June 1999) can't update
11439 regs_ever_live without disturbing other parts of the compiler, so
11440 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
11441 if (!can_create_pseudo_p ()
11442 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 11443 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 11444
8d30c4ee 11445 current_function_uses_pic_offset_table = 1;
3cb999d8 11446
1ff7789b
MM
11447 return pic_offset_table_rtx;
11448}
a7df97e6 11449\f
e2500fed
GK
11450/* Function to init struct machine_function.
11451 This will be called, via a pointer variable,
11452 from push_function_context. */
a7df97e6 11453
e2500fed 11454static struct machine_function *
863d938c 11455rs6000_init_machine_status (void)
a7df97e6 11456{
e2500fed 11457 return ggc_alloc_cleared (sizeof (machine_function));
a7df97e6 11458}
9878760c 11459\f
0ba1b2ff
AM
11460/* These macros test for integers and extract the low-order bits. */
11461#define INT_P(X) \
11462((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11463 && GET_MODE (X) == VOIDmode)
11464
11465#define INT_LOWPART(X) \
11466 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11467
11468int
a2369ed3 11469extract_MB (rtx op)
0ba1b2ff
AM
11470{
11471 int i;
11472 unsigned long val = INT_LOWPART (op);
11473
11474 /* If the high bit is zero, the value is the first 1 bit we find
11475 from the left. */
11476 if ((val & 0x80000000) == 0)
11477 {
37409796 11478 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11479
11480 i = 1;
11481 while (((val <<= 1) & 0x80000000) == 0)
11482 ++i;
11483 return i;
11484 }
11485
11486 /* If the high bit is set and the low bit is not, or the mask is all
11487 1's, the value is zero. */
11488 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11489 return 0;
11490
11491 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11492 from the right. */
11493 i = 31;
11494 while (((val >>= 1) & 1) != 0)
11495 --i;
11496
11497 return i;
11498}
11499
11500int
a2369ed3 11501extract_ME (rtx op)
0ba1b2ff
AM
11502{
11503 int i;
11504 unsigned long val = INT_LOWPART (op);
11505
11506 /* If the low bit is zero, the value is the first 1 bit we find from
11507 the right. */
11508 if ((val & 1) == 0)
11509 {
37409796 11510 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11511
11512 i = 30;
11513 while (((val >>= 1) & 1) == 0)
11514 --i;
11515
11516 return i;
11517 }
11518
11519 /* If the low bit is set and the high bit is not, or the mask is all
11520 1's, the value is 31. */
11521 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11522 return 31;
11523
11524 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11525 from the left. */
11526 i = 0;
11527 while (((val <<= 1) & 0x80000000) != 0)
11528 ++i;
11529
11530 return i;
11531}
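/* Illustrative aside, not part of rs6000.c: extract_MB and extract_ME above
   recover the rlwinm MB/ME fields from a 32-bit mask.  PowerPC numbers bits
   from the left, so bit 0 is the most significant bit, and the mask is the
   run of ones from bit MB through bit ME, possibly wrapping around.  The
   sketch below reproduces the MB computation only (ME is the mirror image);
   mb_of is a hypothetical name, and the mask must be a non-empty rlwinm mask.  */

#include <stdio.h>

static int
mb_of (unsigned long val)
{
  int i;
  if ((val & 0xffffffff) == 0)
    return -1;                          /* not a valid rlwinm mask */
  if ((val & 0x80000000) == 0)
    {
      /* Contiguous mask: first 1 bit from the left.  */
      i = 1;
      while (((val <<= 1) & 0x80000000) == 0)
        ++i;
      return i;
    }
  if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 0;
  /* Wrap-around mask: first 0 bit from the right.  */
  i = 31;
  while (((val >>= 1) & 1) != 0)
    --i;
  return i;
}

int
main (void)
{
  /* Contiguous mask: ones in bits 5..28 (left-to-right numbering).  */
  printf ("MB(0x07fffff8) = %d\n", mb_of (0x07fffff8));   /* prints 5  */
  /* Wrap-around mask: ones in bits 28..31 and 0..3.  */
  printf ("MB(0xf000000f) = %d\n", mb_of (0xf000000f));   /* prints 28 */
  return 0;
}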
11532
c4501e62
JJ
11533/* Locate some local-dynamic symbol still in use by this function
11534 so that we can print its name in some tls_ld pattern. */
11535
11536static const char *
863d938c 11537rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
11538{
11539 rtx insn;
11540
11541 if (cfun->machine->some_ld_name)
11542 return cfun->machine->some_ld_name;
11543
11544 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11545 if (INSN_P (insn)
11546 && for_each_rtx (&PATTERN (insn),
11547 rs6000_get_some_local_dynamic_name_1, 0))
11548 return cfun->machine->some_ld_name;
11549
37409796 11550 gcc_unreachable ();
c4501e62
JJ
11551}
11552
11553/* Helper function for rs6000_get_some_local_dynamic_name. */
11554
11555static int
a2369ed3 11556rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
11557{
11558 rtx x = *px;
11559
11560 if (GET_CODE (x) == SYMBOL_REF)
11561 {
11562 const char *str = XSTR (x, 0);
11563 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11564 {
11565 cfun->machine->some_ld_name = str;
11566 return 1;
11567 }
11568 }
11569
11570 return 0;
11571}
11572
85b776df
AM
11573/* Write out a function code label. */
11574
11575void
11576rs6000_output_function_entry (FILE *file, const char *fname)
11577{
11578 if (fname[0] != '.')
11579 {
11580 switch (DEFAULT_ABI)
11581 {
11582 default:
37409796 11583 gcc_unreachable ();
85b776df
AM
11584
11585 case ABI_AIX:
11586 if (DOT_SYMBOLS)
11587 putc ('.', file);
11588 else
11589 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11590 break;
11591
11592 case ABI_V4:
11593 case ABI_DARWIN:
11594 break;
11595 }
11596 }
11597 if (TARGET_AIX)
11598 RS6000_OUTPUT_BASENAME (file, fname);
11599 else
11600 assemble_name (file, fname);
11601}
11602
9878760c
RK
11603/* Print an operand. Recognize special options, documented below. */
11604
38c1f2d7 11605#if TARGET_ELF
d9407988 11606#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11607#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
11608#else
11609#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11610#define SMALL_DATA_REG 0
ba5e43aa
MM
11611#endif
11612
9878760c 11613void
a2369ed3 11614print_operand (FILE *file, rtx x, int code)
9878760c
RK
11615{
11616 int i;
a260abc9 11617 HOST_WIDE_INT val;
0ba1b2ff 11618 unsigned HOST_WIDE_INT uval;
9878760c
RK
11619
11620 switch (code)
11621 {
a8b3aeda 11622 case '.':
a85d226b
RK
11623 /* Write out an instruction after the call which may be replaced
11624 with glue code by the loader. This depends on the AIX version. */
11625 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
11626 return;
11627
81eace42
GK
11628 /* %a is output_address. */
11629
9854d9ed
RK
11630 case 'A':
11631 /* If X is a constant integer whose low-order 5 bits are zero,
11632 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11633 in the AIX assembler where "sri" with a zero shift count
20e26713 11634 writes a trash instruction. */
9854d9ed 11635 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11636 putc ('l', file);
9854d9ed 11637 else
76229ac8 11638 putc ('r', file);
9854d9ed
RK
11639 return;
11640
11641 case 'b':
e2c953b6
DE
11642 /* If constant, low-order 16 bits of constant, unsigned.
11643 Otherwise, write normally. */
11644 if (INT_P (x))
11645 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11646 else
11647 print_operand (file, x, 0);
cad12a8d
RK
11648 return;
11649
a260abc9
DE
11650 case 'B':
11651 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11652 for 64-bit mask direction. */
9390387d 11653 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11654 return;
a260abc9 11655
81eace42
GK
11656 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11657 output_operand. */
11658
423c1189
AH
11659 case 'c':
11660 /* X is a CR register. Print the number of the GT bit of the CR. */
11661 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11662 output_operand_lossage ("invalid %%c value");
11663 else
11664 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11665 return;
11666
11667 case 'D':
cef6b86c 11668 /* Like 'J' but get to the GT bit only. */
37409796 11669 gcc_assert (GET_CODE (x) == REG);
423c1189 11670
cef6b86c
EB
11671 /* Bit 1 is GT bit. */
11672 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 11673
cef6b86c
EB
11674 /* Add one for shift count in rlinm for scc. */
11675 fprintf (file, "%d", i + 1);
423c1189
AH
11676 return;
11677
9854d9ed 11678 case 'E':
39a10a29 11679	      /* X is a CR register.  Print the number of the EQ bit of the CR.  */
9854d9ed
RK
11680 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11681 output_operand_lossage ("invalid %%E value");
78fbdbf7 11682 else
39a10a29 11683 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 11684 return;
9854d9ed
RK
11685
11686 case 'f':
11687 /* X is a CR register. Print the shift count needed to move it
11688 to the high-order four bits. */
11689 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11690 output_operand_lossage ("invalid %%f value");
11691 else
9ebbca7d 11692 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11693 return;
11694
11695 case 'F':
11696 /* Similar, but print the count for the rotate in the opposite
11697 direction. */
11698 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11699 output_operand_lossage ("invalid %%F value");
11700 else
9ebbca7d 11701 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11702 return;
11703
11704 case 'G':
11705 /* X is a constant integer. If it is negative, print "m",
43aa4e05 11706 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
11707 if (GET_CODE (x) != CONST_INT)
11708 output_operand_lossage ("invalid %%G value");
11709 else if (INTVAL (x) >= 0)
76229ac8 11710 putc ('z', file);
9854d9ed 11711 else
76229ac8 11712 putc ('m', file);
9854d9ed 11713 return;
e2c953b6 11714
9878760c 11715 case 'h':
a4f6c312
SS
11716 /* If constant, output low-order five bits. Otherwise, write
11717 normally. */
9878760c 11718 if (INT_P (x))
5f59ecb7 11719 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
11720 else
11721 print_operand (file, x, 0);
11722 return;
11723
64305719 11724 case 'H':
a4f6c312
SS
11725 /* If constant, output low-order six bits. Otherwise, write
11726 normally. */
64305719 11727 if (INT_P (x))
5f59ecb7 11728 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
11729 else
11730 print_operand (file, x, 0);
11731 return;
11732
9854d9ed
RK
11733 case 'I':
11734 /* Print `i' if this is a constant, else nothing. */
9878760c 11735 if (INT_P (x))
76229ac8 11736 putc ('i', file);
9878760c
RK
11737 return;
11738
9854d9ed
RK
11739 case 'j':
11740 /* Write the bit number in CCR for jump. */
11741 i = ccr_bit (x, 0);
11742 if (i == -1)
11743 output_operand_lossage ("invalid %%j code");
9878760c 11744 else
9854d9ed 11745 fprintf (file, "%d", i);
9878760c
RK
11746 return;
11747
9854d9ed
RK
11748 case 'J':
11749 /* Similar, but add one for shift count in rlinm for scc and pass
11750 scc flag to `ccr_bit'. */
11751 i = ccr_bit (x, 1);
11752 if (i == -1)
11753 output_operand_lossage ("invalid %%J code");
11754 else
a0466a68
RK
11755 /* If we want bit 31, write a shift count of zero, not 32. */
11756 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
11757 return;
11758
9854d9ed
RK
11759 case 'k':
11760 /* X must be a constant. Write the 1's complement of the
11761 constant. */
9878760c 11762 if (! INT_P (x))
9854d9ed 11763 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
11764 else
11765 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
11766 return;
11767
81eace42 11768 case 'K':
9ebbca7d
GK
11769 /* X must be a symbolic constant on ELF. Write an
11770 expression suitable for an 'addi' that adds in the low 16
11771 bits of the MEM. */
11772 if (GET_CODE (x) != CONST)
11773 {
11774 print_operand_address (file, x);
11775 fputs ("@l", file);
11776 }
11777 else
11778 {
11779 if (GET_CODE (XEXP (x, 0)) != PLUS
11780 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
11781 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
11782 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 11783 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
11784 print_operand_address (file, XEXP (XEXP (x, 0), 0));
11785 fputs ("@l", file);
ed8d2920
MM
11786 /* For GNU as, there must be a non-alphanumeric character
11787 between 'l' and the number. The '-' is added by
11788 print_operand() already. */
11789 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
11790 fputs ("+", file);
9ebbca7d
GK
11791 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
11792 }
81eace42
GK
11793 return;
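      /* Illustrative sketch (not verbatim compiler output): for
	 (const (plus (symbol_ref "sym") (const_int 8))) the %K code above
	 prints "sym@l+8", a low-half expression an addi can carry; a
	 negative offset prints as "sym@l-8" because the sign comes from
	 the constant itself.  */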
11794
11795 /* %l is output_asm_label. */
9ebbca7d 11796
9854d9ed
RK
11797 case 'L':
11798 /* Write second word of DImode or DFmode reference. Works on register
11799 or non-indexed memory only. */
11800 if (GET_CODE (x) == REG)
fb5c67a7 11801 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
11802 else if (GET_CODE (x) == MEM)
11803 {
11804 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 11805 we have already done it, we can just use an offset of word. */
9854d9ed
RK
11806 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11807 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
11808 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11809 UNITS_PER_WORD));
6fb5fa3c
DB
11810 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11811 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11812 UNITS_PER_WORD));
9854d9ed 11813 else
d7624dc0
RK
11814 output_address (XEXP (adjust_address_nv (x, SImode,
11815 UNITS_PER_WORD),
11816 0));
ed8908e7 11817
ba5e43aa 11818 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11819 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11820 reg_names[SMALL_DATA_REG]);
9854d9ed 11821 }
9878760c 11822 return;
f676971a 11823
9878760c
RK
11824 case 'm':
11825 /* MB value for a mask operand. */
b1765bde 11826 if (! mask_operand (x, SImode))
9878760c
RK
11827 output_operand_lossage ("invalid %%m value");
11828
0ba1b2ff 11829 fprintf (file, "%d", extract_MB (x));
9878760c
RK
11830 return;
11831
11832 case 'M':
11833 /* ME value for a mask operand. */
b1765bde 11834 if (! mask_operand (x, SImode))
a260abc9 11835 output_operand_lossage ("invalid %%M value");
9878760c 11836
0ba1b2ff 11837 fprintf (file, "%d", extract_ME (x));
9878760c
RK
11838 return;
11839
81eace42
GK
11840 /* %n outputs the negative of its operand. */
11841
9878760c
RK
11842 case 'N':
11843 /* Write the number of elements in the vector times 4. */
11844 if (GET_CODE (x) != PARALLEL)
11845 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
11846 else
11847 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
11848 return;
11849
11850 case 'O':
11851 /* Similar, but subtract 1 first. */
11852 if (GET_CODE (x) != PARALLEL)
1427100a 11853 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
11854 else
11855 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
11856 return;
11857
9854d9ed
RK
11858 case 'p':
11859 /* X is a CONST_INT that is a power of two. Output the logarithm. */
11860 if (! INT_P (x)
2bfcf297 11861 || INT_LOWPART (x) < 0
9854d9ed
RK
11862 || (i = exact_log2 (INT_LOWPART (x))) < 0)
11863 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
11864 else
11865 fprintf (file, "%d", i);
9854d9ed
RK
11866 return;
11867
9878760c
RK
11868 case 'P':
11869 /* The operand must be an indirect memory reference. The result
8bb418a3 11870 is the register name. */
9878760c
RK
11871 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
11872 || REGNO (XEXP (x, 0)) >= 32)
11873 output_operand_lossage ("invalid %%P value");
e2c953b6 11874 else
fb5c67a7 11875 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
11876 return;
11877
dfbdccdb
GK
11878 case 'q':
11879 /* This outputs the logical code corresponding to a boolean
11880 expression. The expression may have one or both operands
39a10a29 11881 negated (if one, only the first one). For condition register
11882 logical operations, it will also treat the negated
11883 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 11884 {
63bc1d05 11885 const char *const *t = 0;
dfbdccdb
GK
11886 const char *s;
11887 enum rtx_code code = GET_CODE (x);
11888 static const char * const tbl[3][3] = {
11889 { "and", "andc", "nor" },
11890 { "or", "orc", "nand" },
11891 { "xor", "eqv", "xor" } };
11892
11893 if (code == AND)
11894 t = tbl[0];
11895 else if (code == IOR)
11896 t = tbl[1];
11897 else if (code == XOR)
11898 t = tbl[2];
11899 else
11900 output_operand_lossage ("invalid %%q value");
11901
11902 if (GET_CODE (XEXP (x, 0)) != NOT)
11903 s = t[0];
11904 else
11905 {
11906 if (GET_CODE (XEXP (x, 1)) == NOT)
11907 s = t[2];
11908 else
11909 s = t[1];
11910 }
f676971a 11911
dfbdccdb
GK
11912 fputs (s, file);
11913 }
11914 return;
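      /* Illustrative examples of the %q table above: (and (not A) B)
	 selects "andc", (and (not A) (not B)) selects "nor", and
	 (ior (not A) (not B)) selects "nand".  */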
11915
2c4a9cff
DE
11916 case 'Q':
11917 if (TARGET_MFCRF)
3b6ce0af 11918 fputc (',', file);
5efb1046 11919 /* FALLTHRU */
2c4a9cff
DE
11920 else
11921 return;
11922
9854d9ed
RK
11923 case 'R':
11924 /* X is a CR register. Print the mask for `mtcrf'. */
11925 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11926 output_operand_lossage ("invalid %%R value");
11927 else
9ebbca7d 11928 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 11929 return;
9854d9ed
RK
11930
11931 case 's':
11932 /* Low 5 bits of 32 - value */
11933 if (! INT_P (x))
11934 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
11935 else
11936 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 11937 return;
9854d9ed 11938
a260abc9 11939 case 'S':
0ba1b2ff 11940 /* PowerPC64 mask position. All 0's is excluded.
11941 CONST_INT 32-bit mask is considered sign-extended so any
11942 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 11943 if (! mask64_operand (x, DImode))
a260abc9
DE
11944 output_operand_lossage ("invalid %%S value");
11945
0ba1b2ff 11946 uval = INT_LOWPART (x);
a260abc9 11947
0ba1b2ff 11948 if (uval & 1) /* Clear Left */
a260abc9 11949 {
f099d360
GK
11950#if HOST_BITS_PER_WIDE_INT > 64
11951 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11952#endif
0ba1b2ff 11953 i = 64;
a260abc9 11954 }
0ba1b2ff 11955 else /* Clear Right */
a260abc9 11956 {
0ba1b2ff 11957 uval = ~uval;
f099d360
GK
11958#if HOST_BITS_PER_WIDE_INT > 64
11959 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11960#endif
0ba1b2ff 11961 i = 63;
a260abc9 11962 }
0ba1b2ff
AM
11963 while (uval != 0)
11964 --i, uval >>= 1;
37409796 11965 gcc_assert (i >= 0);
0ba1b2ff
AM
11966 fprintf (file, "%d", i);
11967 return;
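      /* Worked example for %S (illustrative, assuming a 64-bit
	 HOST_WIDE_INT): the "clear right" mask 0xFFFFFFFF00000000 yields
	 31, an ending bit position in the rldicr style, while the "clear
	 left" mask 0x00000000FFFFFFFF yields 32, a beginning bit position
	 in the rldicl style.  */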
a260abc9 11968
a3170dc6
AH
11969 case 't':
11970 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 11971 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
11972
11973 /* Bit 3 is OV bit. */
11974 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
11975
11976 /* If we want bit 31, write a shift count of zero, not 32. */
11977 fprintf (file, "%d", i == 31 ? 0 : i + 1);
11978 return;
11979
cccf3bdc
DE
11980 case 'T':
11981 /* Print the symbolic name of a branch target register. */
1de43f85
DE
11982 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
11983 && REGNO (x) != CTR_REGNO))
cccf3bdc 11984 output_operand_lossage ("invalid %%T value");
1de43f85 11985 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
11986 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
11987 else
11988 fputs ("ctr", file);
11989 return;
11990
9854d9ed 11991 case 'u':
802a0058 11992 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
11993 if (! INT_P (x))
11994 output_operand_lossage ("invalid %%u value");
e2c953b6 11995 else
f676971a 11996 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 11997 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
11998 return;
11999
802a0058
MM
12000 case 'v':
12001 /* High-order 16 bits of constant for use in signed operand. */
12002 if (! INT_P (x))
12003 output_operand_lossage ("invalid %%v value");
e2c953b6 12004 else
134c32f6
DE
12005 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
12006 (INT_LOWPART (x) >> 16) & 0xffff);
12007 return;
802a0058 12008
9854d9ed
RK
12009 case 'U':
12010 /* Print `u' if this has an auto-increment or auto-decrement. */
12011 if (GET_CODE (x) == MEM
12012 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
12013 || GET_CODE (XEXP (x, 0)) == PRE_DEC
12014 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 12015 putc ('u', file);
9854d9ed 12016 return;
9878760c 12017
e0cd0770
JC
12018 case 'V':
12019 /* Print the trap code for this operand. */
12020 switch (GET_CODE (x))
12021 {
12022 case EQ:
12023 fputs ("eq", file); /* 4 */
12024 break;
12025 case NE:
12026 fputs ("ne", file); /* 24 */
12027 break;
12028 case LT:
12029 fputs ("lt", file); /* 16 */
12030 break;
12031 case LE:
12032 fputs ("le", file); /* 20 */
12033 break;
12034 case GT:
12035 fputs ("gt", file); /* 8 */
12036 break;
12037 case GE:
12038 fputs ("ge", file); /* 12 */
12039 break;
12040 case LTU:
12041 fputs ("llt", file); /* 2 */
12042 break;
12043 case LEU:
12044 fputs ("lle", file); /* 6 */
12045 break;
12046 case GTU:
12047 fputs ("lgt", file); /* 1 */
12048 break;
12049 case GEU:
12050 fputs ("lge", file); /* 5 */
12051 break;
12052 default:
37409796 12053 gcc_unreachable ();
e0cd0770
JC
12054 }
12055 break;
12056
9854d9ed
RK
12057 case 'w':
12058 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
12059 normally. */
12060 if (INT_P (x))
f676971a 12061 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 12062 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
12063 else
12064 print_operand (file, x, 0);
9878760c
RK
12065 return;
12066
9854d9ed 12067 case 'W':
e2c953b6 12068 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
12069 val = (GET_CODE (x) == CONST_INT
12070 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
12071
12072 if (val < 0)
12073 i = -1;
9854d9ed 12074 else
e2c953b6
DE
12075 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
12076 if ((val <<= 1) < 0)
12077 break;
12078
12079#if HOST_BITS_PER_WIDE_INT == 32
12080 if (GET_CODE (x) == CONST_INT && i >= 0)
12081 i += 32; /* zero-extend high-part was all 0's */
12082 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
12083 {
12084 val = CONST_DOUBLE_LOW (x);
12085
37409796
NS
12086 gcc_assert (val);
12087 if (val < 0)
e2c953b6
DE
12088 --i;
12089 else
12090 for ( ; i < 64; i++)
12091 if ((val <<= 1) < 0)
12092 break;
12093 }
12094#endif
12095
12096 fprintf (file, "%d", i + 1);
9854d9ed 12097 return;
9878760c 12098
9854d9ed
RK
12099 case 'X':
12100 if (GET_CODE (x) == MEM
6fb5fa3c
DB
12101 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
12102 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
12103 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 12104 putc ('x', file);
9854d9ed 12105 return;
9878760c 12106
9854d9ed
RK
12107 case 'Y':
12108 /* Like 'L', for third word of TImode */
12109 if (GET_CODE (x) == REG)
fb5c67a7 12110 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 12111 else if (GET_CODE (x) == MEM)
9878760c 12112 {
9854d9ed
RK
12113 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12114 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12115 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
12116 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12117 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 12118 else
d7624dc0 12119 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 12120 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12121 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12122 reg_names[SMALL_DATA_REG]);
9878760c
RK
12123 }
12124 return;
f676971a 12125
9878760c 12126 case 'z':
b4ac57ab
RS
12127 /* X is a SYMBOL_REF. Write out the name preceded by a
12128 period and without any trailing data in brackets. Used for function
12129 names. If we are configured for System V (or the embedded ABI) on
12130 the PowerPC, do not emit the period, since those systems do not use
12131 TOCs and the like. */
37409796 12132 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 12133
c4ad648e
AM
12134 /* Mark the decl as referenced so that cgraph will output the
12135 function. */
9bf6462a 12136 if (SYMBOL_REF_DECL (x))
c4ad648e 12137 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 12138
85b776df 12139 /* For macho, check to see if we need a stub. */
f9da97f0
AP
12140 if (TARGET_MACHO)
12141 {
12142 const char *name = XSTR (x, 0);
a031e781 12143#if TARGET_MACHO
3b48085e 12144 if (MACHOPIC_INDIRECT
11abc112
MM
12145 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
12146 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
12147#endif
12148 assemble_name (file, name);
12149 }
85b776df 12150 else if (!DOT_SYMBOLS)
9739c90c 12151 assemble_name (file, XSTR (x, 0));
85b776df
AM
12152 else
12153 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
12154 return;
12155
9854d9ed
RK
12156 case 'Z':
12157 /* Like 'L', for last word of TImode. */
12158 if (GET_CODE (x) == REG)
fb5c67a7 12159 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
12160 else if (GET_CODE (x) == MEM)
12161 {
12162 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12163 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12164 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
12165 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12166 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 12167 else
d7624dc0 12168 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 12169 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12170 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12171 reg_names[SMALL_DATA_REG]);
9854d9ed 12172 }
5c23c401 12173 return;
0ac081f6 12174
a3170dc6 12175 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
12176 case 'y':
12177 {
12178 rtx tmp;
12179
37409796 12180 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
12181
12182 tmp = XEXP (x, 0);
12183
90d3ff1c 12184 /* Ugly hack because %y is overloaded. */
8ef65e3d 12185 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
12186 && (GET_MODE_SIZE (GET_MODE (x)) == 8
12187 || GET_MODE (x) == TFmode
12188 || GET_MODE (x) == TImode))
a3170dc6
AH
12189 {
12190 /* Handle [reg]. */
12191 if (GET_CODE (tmp) == REG)
12192 {
12193 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
12194 break;
12195 }
12196 /* Handle [reg+UIMM]. */
12197 else if (GET_CODE (tmp) == PLUS &&
12198 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
12199 {
12200 int x;
12201
37409796 12202 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
12203
12204 x = INTVAL (XEXP (tmp, 1));
12205 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
12206 break;
12207 }
12208
12209 /* Fall through. Must be [reg+reg]. */
12210 }
850e8d3d
DN
12211 if (TARGET_ALTIVEC
12212 && GET_CODE (tmp) == AND
12213 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
12214 && INTVAL (XEXP (tmp, 1)) == -16)
12215 tmp = XEXP (tmp, 0);
0ac081f6 12216 if (GET_CODE (tmp) == REG)
c62f2db5 12217 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 12218 else
0ac081f6 12219 {
37409796 12220 gcc_assert (GET_CODE (tmp) == PLUS
9024f4b8
AM
12221 && REG_P (XEXP (tmp, 0))
12222 && REG_P (XEXP (tmp, 1)));
bb8df8a6 12223
0ac081f6
AH
12224 if (REGNO (XEXP (tmp, 0)) == 0)
12225 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
12226 reg_names[ REGNO (XEXP (tmp, 0)) ]);
12227 else
12228 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
12229 reg_names[ REGNO (XEXP (tmp, 1)) ]);
12230 }
0ac081f6
AH
12231 break;
12232 }
f676971a 12233
9878760c
RK
12234 case 0:
12235 if (GET_CODE (x) == REG)
12236 fprintf (file, "%s", reg_names[REGNO (x)]);
12237 else if (GET_CODE (x) == MEM)
12238 {
12239 /* We need to handle PRE_INC and PRE_DEC here, since we need to
12240 know the width from the mode. */
12241 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
12242 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
12243 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 12244 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
12245 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
12246 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
12247 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12248 output_address (XEXP (XEXP (x, 0), 1));
9878760c 12249 else
a54d04b7 12250 output_address (XEXP (x, 0));
9878760c
RK
12251 }
12252 else
a54d04b7 12253 output_addr_const (file, x);
a85d226b 12254 return;
9878760c 12255
c4501e62
JJ
12256 case '&':
12257 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12258 return;
12259
9878760c
RK
12260 default:
12261 output_operand_lossage ("invalid %%xn code");
12262 }
12263}
12264\f
12265/* Print the address of an operand. */
12266
12267void
a2369ed3 12268print_operand_address (FILE *file, rtx x)
9878760c
RK
12269{
12270 if (GET_CODE (x) == REG)
4697a36c 12271 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
12272 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12273 || GET_CODE (x) == LABEL_REF)
9878760c
RK
12274 {
12275 output_addr_const (file, x);
ba5e43aa 12276 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12277 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12278 reg_names[SMALL_DATA_REG]);
37409796
NS
12279 else
12280 gcc_assert (!TARGET_TOC);
9878760c
RK
12281 }
12282 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12283 {
9024f4b8 12284 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 12285 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
12286 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12287 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 12288 else
4697a36c
MM
12289 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12290 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
12291 }
12292 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
12293 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
12294 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
12295#if TARGET_ELF
12296 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12297 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
12298 {
12299 output_addr_const (file, XEXP (x, 1));
12300 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12301 }
c859cda6
DJ
12302#endif
12303#if TARGET_MACHO
12304 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12305 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
12306 {
12307 fprintf (file, "lo16(");
12308 output_addr_const (file, XEXP (x, 1));
12309 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12310 }
3cb999d8 12311#endif
4d588c14 12312 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 12313 {
2bfcf297 12314 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 12315 {
2bfcf297
DB
12316 rtx contains_minus = XEXP (x, 1);
12317 rtx minus, symref;
12318 const char *name;
f676971a 12319
9ebbca7d 12320 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 12321 turn it into (sym) for output_addr_const. */
9ebbca7d
GK
12322 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
12323 contains_minus = XEXP (contains_minus, 0);
12324
2bfcf297
DB
12325 minus = XEXP (contains_minus, 0);
12326 symref = XEXP (minus, 0);
12327 XEXP (contains_minus, 0) = symref;
12328 if (TARGET_ELF)
12329 {
12330 char *newname;
12331
12332 name = XSTR (symref, 0);
12333 newname = alloca (strlen (name) + sizeof ("@toc"));
12334 strcpy (newname, name);
12335 strcat (newname, "@toc");
12336 XSTR (symref, 0) = newname;
12337 }
12338 output_addr_const (file, XEXP (x, 1));
12339 if (TARGET_ELF)
12340 XSTR (symref, 0) = name;
9ebbca7d
GK
12341 XEXP (contains_minus, 0) = minus;
12342 }
12343 else
12344 output_addr_const (file, XEXP (x, 1));
12345
12346 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
12347 }
9878760c 12348 else
37409796 12349 gcc_unreachable ();
9878760c
RK
12350}
12351\f
88cad84b 12352/* Target hook for assembling integer objects. The PowerPC version has
12353 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
12354 is defined. It also needs to handle DI-mode objects on 64-bit
12355 targets. */
12356
12357static bool
a2369ed3 12358rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 12359{
f4f4921e 12360#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 12361 /* Special handling for SI values. */
84dcde01 12362 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 12363 {
301d03af 12364 static int recurse = 0;
f676971a 12365
301d03af
RS
12366 /* For -mrelocatable, we mark all addresses that need to be fixed up
12367 in the .fixup section. */
12368 if (TARGET_RELOCATABLE
d6b5193b
RS
12369 && in_section != toc_section
12370 && in_section != text_section
4325ca90 12371 && !unlikely_text_section_p (in_section)
301d03af
RS
12372 && !recurse
12373 && GET_CODE (x) != CONST_INT
12374 && GET_CODE (x) != CONST_DOUBLE
12375 && CONSTANT_P (x))
12376 {
12377 char buf[256];
12378
12379 recurse = 1;
12380 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12381 fixuplabelno++;
12382 ASM_OUTPUT_LABEL (asm_out_file, buf);
12383 fprintf (asm_out_file, "\t.long\t(");
12384 output_addr_const (asm_out_file, x);
12385 fprintf (asm_out_file, ")@fixup\n");
12386 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12387 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12388 fprintf (asm_out_file, "\t.long\t");
12389 assemble_name (asm_out_file, buf);
12390 fprintf (asm_out_file, "\n\t.previous\n");
12391 recurse = 0;
12392 return true;
12393 }
12394 /* Remove initial .'s to turn a -mcall-aixdesc function
12395 address into the address of the descriptor, not the function
12396 itself. */
12397 else if (GET_CODE (x) == SYMBOL_REF
12398 && XSTR (x, 0)[0] == '.'
12399 && DEFAULT_ABI == ABI_AIX)
12400 {
12401 const char *name = XSTR (x, 0);
12402 while (*name == '.')
12403 name++;
12404
12405 fprintf (asm_out_file, "\t.long\t%s\n", name);
12406 return true;
12407 }
12408 }
f4f4921e 12409#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
12410 return default_assemble_integer (x, size, aligned_p);
12411}
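/* Illustrative sketch of the -mrelocatable path above (shape only, not
   verbatim output):
	.LCPn:
		.long	(sym)@fixup
		.section	".fixup","aw"
		.align	2
		.long	.LCPn
		.previous
   so the word at .LCPn can be relocated by the startup code.  */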
93638d7a
AM
12412
12413#ifdef HAVE_GAS_HIDDEN
12414/* Emit an assembler directive to set symbol visibility for DECL to
12415 VISIBILITY_TYPE. */
12416
5add3202 12417static void
a2369ed3 12418rs6000_assemble_visibility (tree decl, int vis)
93638d7a 12419{
93638d7a
AM
12420 /* Functions need to have their entry point symbol visibility set as
12421 well as their descriptor symbol visibility. */
85b776df
AM
12422 if (DEFAULT_ABI == ABI_AIX
12423 && DOT_SYMBOLS
12424 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 12425 {
25fdb4dc 12426 static const char * const visibility_types[] = {
c4ad648e 12427 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
12428 };
12429
12430 const char *name, *type;
93638d7a
AM
12431
12432 name = ((* targetm.strip_name_encoding)
12433 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 12434 type = visibility_types[vis];
93638d7a 12435
25fdb4dc
RH
12436 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12437 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 12438 }
25fdb4dc
RH
12439 else
12440 default_assemble_visibility (decl, vis);
93638d7a
AM
12441}
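/* For example (illustrative), a hidden function "foo" under the AIX ABI
   with dot symbols gets both directives:
	.hidden	foo
	.hidden	.foo
   covering the descriptor and the code entry point.  */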
12442#endif
301d03af 12443\f
39a10a29 12444enum rtx_code
a2369ed3 12445rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
12446{
12447 /* Reversal of FP compares takes care -- an ordered compare
12448 becomes an unordered compare and vice versa. */
f676971a 12449 if (mode == CCFPmode
bc9ec0e0
GK
12450 && (!flag_finite_math_only
12451 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12452 || code == UNEQ || code == LTGT))
bab6226b 12453 return reverse_condition_maybe_unordered (code);
39a10a29 12454 else
bab6226b 12455 return reverse_condition (code);
39a10a29
GK
12456}
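/* Illustrative example: in CCFPmode, when NaNs matter, GE is reversed to
   UNLT rather than to plain LT, so the "unordered" outcome stays on the
   correct side of the branch.  */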
12457
39a10a29
GK
12458/* Generate a compare for CODE. Return a brand-new rtx that
12459 represents the result of the compare. */
a4f6c312 12460
39a10a29 12461static rtx
a2369ed3 12462rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
12463{
12464 enum machine_mode comp_mode;
12465 rtx compare_result;
12466
12467 if (rs6000_compare_fp_p)
12468 comp_mode = CCFPmode;
12469 else if (code == GTU || code == LTU
c4ad648e 12470 || code == GEU || code == LEU)
39a10a29 12471 comp_mode = CCUNSmode;
60934f9c
NS
12472 else if ((code == EQ || code == NE)
12473 && GET_CODE (rs6000_compare_op0) == SUBREG
12474 && GET_CODE (rs6000_compare_op1) == SUBREG
12475 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
12476 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
12477 /* These are unsigned values, perhaps there will be a later
12478 ordering compare that can be shared with this one.
12479 Unfortunately we cannot detect the signedness of the operands
12480 for non-subregs. */
12481 comp_mode = CCUNSmode;
39a10a29
GK
12482 else
12483 comp_mode = CCmode;
12484
12485 /* First, the compare. */
12486 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12487
cef6b86c 12488 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12489 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 12490 && rs6000_compare_fp_p)
a3170dc6 12491 {
64022b5d 12492 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
12493 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
12494
12495 if (op_mode == VOIDmode)
12496 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 12497
cef6b86c
EB
12498 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12499 This explains the following mess. */
423c1189 12500
a3170dc6
AH
12501 switch (code)
12502 {
423c1189 12503 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
12504 switch (op_mode)
12505 {
12506 case SFmode:
12507 cmp = flag_unsafe_math_optimizations
12508 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
12509 rs6000_compare_op1)
12510 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
12511 rs6000_compare_op1);
12512 break;
12513
12514 case DFmode:
12515 cmp = flag_unsafe_math_optimizations
12516 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
12517 rs6000_compare_op1)
12518 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
12519 rs6000_compare_op1);
12520 break;
12521
17caeff2
JM
12522 case TFmode:
12523 cmp = flag_unsafe_math_optimizations
12524 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
12525 rs6000_compare_op1)
12526 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
12527 rs6000_compare_op1);
12528 break;
12529
37409796
NS
12530 default:
12531 gcc_unreachable ();
12532 }
a3170dc6 12533 break;
bb8df8a6 12534
423c1189 12535 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
12536 switch (op_mode)
12537 {
12538 case SFmode:
12539 cmp = flag_unsafe_math_optimizations
12540 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
12541 rs6000_compare_op1)
12542 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
12543 rs6000_compare_op1);
12544 break;
bb8df8a6 12545
37409796
NS
12546 case DFmode:
12547 cmp = flag_unsafe_math_optimizations
12548 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
12549 rs6000_compare_op1)
12550 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
12551 rs6000_compare_op1);
12552 break;
12553
17caeff2
JM
12554 case TFmode:
12555 cmp = flag_unsafe_math_optimizations
12556 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
12557 rs6000_compare_op1)
12558 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
12559 rs6000_compare_op1);
12560 break;
12561
37409796
NS
12562 default:
12563 gcc_unreachable ();
12564 }
a3170dc6 12565 break;
bb8df8a6 12566
423c1189 12567 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
12568 switch (op_mode)
12569 {
12570 case SFmode:
12571 cmp = flag_unsafe_math_optimizations
12572 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
12573 rs6000_compare_op1)
12574 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
12575 rs6000_compare_op1);
12576 break;
bb8df8a6 12577
37409796
NS
12578 case DFmode:
12579 cmp = flag_unsafe_math_optimizations
12580 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
12581 rs6000_compare_op1)
12582 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
12583 rs6000_compare_op1);
12584 break;
12585
17caeff2
JM
12586 case TFmode:
12587 cmp = flag_unsafe_math_optimizations
12588 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
12589 rs6000_compare_op1)
12590 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
12591 rs6000_compare_op1);
12592 break;
12593
37409796
NS
12594 default:
12595 gcc_unreachable ();
12596 }
a3170dc6 12597 break;
4d4cbc0e 12598 default:
37409796 12599 gcc_unreachable ();
a3170dc6
AH
12600 }
12601
12602 /* Synthesize LE and GE from LT/GT || EQ. */
12603 if (code == LE || code == GE || code == LEU || code == GEU)
12604 {
a3170dc6
AH
12605 emit_insn (cmp);
12606
12607 switch (code)
12608 {
12609 case LE: code = LT; break;
12610 case GE: code = GT; break;
12611 case LEU: code = LT; break;
12612 case GEU: code = GT; break;
37409796 12613 default: gcc_unreachable ();
a3170dc6
AH
12614 }
12615
a3170dc6
AH
12616 compare_result2 = gen_reg_rtx (CCFPmode);
12617
12618 /* Do the EQ. */
37409796
NS
12619 switch (op_mode)
12620 {
12621 case SFmode:
12622 cmp = flag_unsafe_math_optimizations
12623 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
12624 rs6000_compare_op1)
12625 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
12626 rs6000_compare_op1);
12627 break;
12628
12629 case DFmode:
12630 cmp = flag_unsafe_math_optimizations
12631 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
12632 rs6000_compare_op1)
12633 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
12634 rs6000_compare_op1);
12635 break;
12636
17caeff2
JM
12637 case TFmode:
12638 cmp = flag_unsafe_math_optimizations
12639 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
12640 rs6000_compare_op1)
12641 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
12642 rs6000_compare_op1);
12643 break;
12644
37409796
NS
12645 default:
12646 gcc_unreachable ();
12647 }
a3170dc6
AH
12648 emit_insn (cmp);
12649
a3170dc6 12650 /* OR them together. */
64022b5d
AH
12651 or_result = gen_reg_rtx (CCFPmode);
12652 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12653 compare_result2);
a3170dc6
AH
12654 compare_result = or_result;
12655 code = EQ;
12656 }
12657 else
12658 {
a3170dc6 12659 if (code == NE || code == LTGT)
a3170dc6 12660 code = NE;
423c1189
AH
12661 else
12662 code = EQ;
a3170dc6
AH
12663 }
12664
12665 emit_insn (cmp);
12666 }
12667 else
de17c25f
DE
12668 {
12669 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12670 CLOBBERs to match cmptf_internal2 pattern. */
12671 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
12672 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 12673 && !TARGET_IEEEQUAD
de17c25f
DE
12674 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
12675 emit_insn (gen_rtx_PARALLEL (VOIDmode,
12676 gen_rtvec (9,
12677 gen_rtx_SET (VOIDmode,
12678 compare_result,
12679 gen_rtx_COMPARE (comp_mode,
12680 rs6000_compare_op0,
12681 rs6000_compare_op1)),
12682 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12683 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12684 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12685 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12686 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12687 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12688 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12689 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
12690 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
12691 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
12692 {
12693 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
12694 comp_mode = CCEQmode;
12695 compare_result = gen_reg_rtx (CCEQmode);
12696 if (TARGET_64BIT)
12697 emit_insn (gen_stack_protect_testdi (compare_result,
12698 rs6000_compare_op0, op1));
12699 else
12700 emit_insn (gen_stack_protect_testsi (compare_result,
12701 rs6000_compare_op0, op1));
12702 }
de17c25f
DE
12703 else
12704 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
12705 gen_rtx_COMPARE (comp_mode,
12706 rs6000_compare_op0,
12707 rs6000_compare_op1)));
12708 }
f676971a 12709
ca5adc63 12710 /* Some kinds of FP comparisons need an OR operation;
e7108df9 12711 under flag_finite_math_only we don't bother. */
39a10a29 12712 if (rs6000_compare_fp_p
e7108df9 12713 && !flag_finite_math_only
8ef65e3d 12714 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
12715 && (code == LE || code == GE
12716 || code == UNEQ || code == LTGT
12717 || code == UNGT || code == UNLT))
12718 {
12719 enum rtx_code or1, or2;
12720 rtx or1_rtx, or2_rtx, compare2_rtx;
12721 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 12722
39a10a29
GK
12723 switch (code)
12724 {
12725 case LE: or1 = LT; or2 = EQ; break;
12726 case GE: or1 = GT; or2 = EQ; break;
12727 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
12728 case LTGT: or1 = LT; or2 = GT; break;
12729 case UNGT: or1 = UNORDERED; or2 = GT; break;
12730 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 12731 default: gcc_unreachable ();
39a10a29
GK
12732 }
12733 validate_condition_mode (or1, comp_mode);
12734 validate_condition_mode (or2, comp_mode);
1c563bed
KH
12735 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
12736 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
12737 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
12738 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
12739 const_true_rtx);
12740 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
12741
12742 compare_result = or_result;
12743 code = EQ;
12744 }
12745
12746 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 12747
1c563bed 12748 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
12749}
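/* Illustrative note on the path above: a floating-point LE, for example,
   is not emitted as one test; it becomes (LT || EQ), with the two CR bits
   IORed (a cror-class operation) into a CCEQ-mode register that the
   caller then branches on as EQ.  */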
12750
12751
12752/* Emit the RTL for an sCOND pattern. */
12753
12754void
a2369ed3 12755rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
12756{
12757 rtx condition_rtx;
12758 enum machine_mode op_mode;
b7053a3f 12759 enum rtx_code cond_code;
39a10a29
GK
12760
12761 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
12762 cond_code = GET_CODE (condition_rtx);
12763
8ef65e3d 12764 if (rs6000_compare_fp_p
423c1189
AH
12765 && !TARGET_FPRS && TARGET_HARD_FLOAT)
12766 {
12767 rtx t;
12768
12769 PUT_MODE (condition_rtx, SImode);
12770 t = XEXP (condition_rtx, 0);
12771
37409796 12772 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
12773
12774 if (cond_code == NE)
64022b5d 12775 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 12776
64022b5d 12777 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
12778 return;
12779 }
12780
b7053a3f
GK
12781 if (cond_code == NE
12782 || cond_code == GE || cond_code == LE
12783 || cond_code == GEU || cond_code == LEU
12784 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
12785 {
12786 rtx not_result = gen_reg_rtx (CCEQmode);
12787 rtx not_op, rev_cond_rtx;
12788 enum machine_mode cc_mode;
f676971a 12789
b7053a3f
GK
12790 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
12791
1c563bed 12792 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 12793 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
12794 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
12795 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
12796 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
12797 }
39a10a29
GK
12798
12799 op_mode = GET_MODE (rs6000_compare_op0);
12800 if (op_mode == VOIDmode)
12801 op_mode = GET_MODE (rs6000_compare_op1);
12802
12803 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
12804 {
12805 PUT_MODE (condition_rtx, DImode);
12806 convert_move (result, condition_rtx, 0);
12807 }
12808 else
12809 {
12810 PUT_MODE (condition_rtx, SImode);
12811 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
12812 }
12813}
12814
39a10a29
GK
12815/* Emit a branch of kind CODE to location LOC. */
12816
12817void
a2369ed3 12818rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
12819{
12820 rtx condition_rtx, loc_ref;
12821
12822 condition_rtx = rs6000_generate_compare (code);
12823 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
12824 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
12825 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
12826 loc_ref, pc_rtx)));
12827}
12828
12a4e8c5
GK
12829/* Return the string to output a conditional branch to LABEL, which is
12830 the operand number of the label, or -1 if the branch is really a
f676971a 12831 conditional return.
12a4e8c5
GK
12832
12833 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
12834 condition code register and its mode specifies what kind of
12835 comparison we made.
12836
a0ab749a 12837 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
12838
12839 INSN is the insn. */
12840
12841char *
a2369ed3 12842output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
12843{
12844 static char string[64];
12845 enum rtx_code code = GET_CODE (op);
12846 rtx cc_reg = XEXP (op, 0);
12847 enum machine_mode mode = GET_MODE (cc_reg);
12848 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 12849 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
12850 int really_reversed = reversed ^ need_longbranch;
12851 char *s = string;
12852 const char *ccode;
12853 const char *pred;
12854 rtx note;
12855
39a10a29
GK
12856 validate_condition_mode (code, mode);
12857
12858	  /* Work out which way this really branches.  We could always use
12859	     reverse_condition_maybe_unordered here, but distinguishing the
12860	     cases makes the resulting assembler clearer.  */
12a4e8c5 12861 if (really_reversed)
de40e1df
DJ
12862 {
12863 /* Reversal of FP compares takes care -- an ordered compare
12864 becomes an unordered compare and vice versa. */
12865 if (mode == CCFPmode)
12866 code = reverse_condition_maybe_unordered (code);
12867 else
12868 code = reverse_condition (code);
12869 }
12a4e8c5 12870
8ef65e3d 12871 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
12872 {
12873 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
12874 to the GT bit. */
37409796
NS
12875 switch (code)
12876 {
12877 case EQ:
12878 /* Opposite of GT. */
12879 code = GT;
12880 break;
12881
12882 case NE:
12883 code = UNLE;
12884 break;
12885
12886 default:
12887 gcc_unreachable ();
12888 }
a3170dc6
AH
12889 }
12890
39a10a29 12891 switch (code)
12a4e8c5
GK
12892 {
12893 /* Not all of these are actually distinct opcodes, but
12894 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
12895 case NE: case LTGT:
12896 ccode = "ne"; break;
12897 case EQ: case UNEQ:
12898 ccode = "eq"; break;
f676971a 12899 case GE: case GEU:
50a0b056 12900 ccode = "ge"; break;
f676971a 12901 case GT: case GTU: case UNGT:
50a0b056 12902 ccode = "gt"; break;
f676971a 12903 case LE: case LEU:
50a0b056 12904 ccode = "le"; break;
f676971a 12905 case LT: case LTU: case UNLT:
50a0b056 12906 ccode = "lt"; break;
12a4e8c5
GK
12907 case UNORDERED: ccode = "un"; break;
12908 case ORDERED: ccode = "nu"; break;
12909 case UNGE: ccode = "nl"; break;
12910 case UNLE: ccode = "ng"; break;
12911 default:
37409796 12912 gcc_unreachable ();
12a4e8c5 12913 }
f676971a
EC
12914
12915 /* Maybe we have a guess as to how likely the branch is.
94a54f47 12916 The old mnemonics don't have a way to specify this information. */
f4857b9b 12917 pred = "";
12a4e8c5
GK
12918 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
12919 if (note != NULL_RTX)
12920 {
12921 /* PROB is the difference from 50%. */
12922 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
12923
12924 /* Only hint for highly probable/improbable branches on newer
12925 cpus as static prediction overrides processor dynamic
12926 prediction. For older cpus we may as well always hint, but
12927 assume not taken for branches that are very close to 50% as a
12928 mispredicted taken branch is more expensive than a
f676971a 12929 mispredicted not-taken branch. */
ec507f2d 12930 if (rs6000_always_hint
2c9e13f3
JH
12931 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
12932 && br_prob_note_reliable_p (note)))
f4857b9b
AM
12933 {
12934 if (abs (prob) > REG_BR_PROB_BASE / 20
12935 && ((prob > 0) ^ need_longbranch))
c4ad648e 12936 pred = "+";
f4857b9b
AM
12937 else
12938 pred = "-";
12939 }
12a4e8c5 12940 }
12a4e8c5
GK
12941
12942 if (label == NULL)
94a54f47 12943 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 12944 else
94a54f47 12945 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 12946
37c67319 12947 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 12948 Assume they'd only be the first character.... */
37c67319
GK
12949 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
12950 *s++ = '%';
94a54f47 12951 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
12952
12953 if (label != NULL)
12954 {
12955 /* If the branch distance was too far, we may have to use an
12956 unconditional branch to go the distance. */
12957 if (need_longbranch)
44518ddd 12958 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
12959 else
12960 s += sprintf (s, ",%s", label);
12961 }
12962
12963 return string;
12964}
50a0b056 12965
64022b5d 12966/* Return the string to flip the GT bit on a CR. */
423c1189 12967char *
64022b5d 12968output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
12969{
12970 static char string[64];
12971 int a, b;
12972
37409796
NS
12973 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
12974 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 12975
64022b5d
AH
12976 /* GT bit. */
12977 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
12978 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
12979
12980 sprintf (string, "crnot %d,%d", a, b);
12981 return string;
12982}
12983
21213b4c
DP
12984/* Return the insn index for the vector compare instruction for the given
12985   CODE, DEST_MODE and OP_MODE.  Return INSN_NOT_AVAILABLE if no valid insn
12986   is available.  */
12987
12988static int
94ff898d 12989get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
12990 enum machine_mode dest_mode,
12991 enum machine_mode op_mode)
12992{
12993 if (!TARGET_ALTIVEC)
12994 return INSN_NOT_AVAILABLE;
12995
12996 switch (code)
12997 {
12998 case EQ:
12999 if (dest_mode == V16QImode && op_mode == V16QImode)
13000 return UNSPEC_VCMPEQUB;
13001 if (dest_mode == V8HImode && op_mode == V8HImode)
13002 return UNSPEC_VCMPEQUH;
13003 if (dest_mode == V4SImode && op_mode == V4SImode)
13004 return UNSPEC_VCMPEQUW;
13005 if (dest_mode == V4SImode && op_mode == V4SFmode)
13006 return UNSPEC_VCMPEQFP;
13007 break;
13008 case GE:
13009 if (dest_mode == V4SImode && op_mode == V4SFmode)
13010 return UNSPEC_VCMPGEFP;
13011 case GT:
13012 if (dest_mode == V16QImode && op_mode == V16QImode)
13013 return UNSPEC_VCMPGTSB;
13014 if (dest_mode == V8HImode && op_mode == V8HImode)
13015 return UNSPEC_VCMPGTSH;
13016 if (dest_mode == V4SImode && op_mode == V4SImode)
13017 return UNSPEC_VCMPGTSW;
13018 if (dest_mode == V4SImode && op_mode == V4SFmode)
13019 return UNSPEC_VCMPGTFP;
13020 break;
13021 case GTU:
13022 if (dest_mode == V16QImode && op_mode == V16QImode)
13023 return UNSPEC_VCMPGTUB;
13024 if (dest_mode == V8HImode && op_mode == V8HImode)
13025 return UNSPEC_VCMPGTUH;
13026 if (dest_mode == V4SImode && op_mode == V4SImode)
13027 return UNSPEC_VCMPGTUW;
13028 break;
13029 default:
13030 break;
13031 }
13032 return INSN_NOT_AVAILABLE;
13033}
13034
13035/* Emit vector compare for operands OP0 and OP1 using code RCODE.
13036 DMODE is expected destination mode. This is a recursive function. */
13037
13038static rtx
13039rs6000_emit_vector_compare (enum rtx_code rcode,
13040 rtx op0, rtx op1,
13041 enum machine_mode dmode)
13042{
13043 int vec_cmp_insn;
13044 rtx mask;
13045 enum machine_mode dest_mode;
13046 enum machine_mode op_mode = GET_MODE (op1);
13047
37409796
NS
13048 gcc_assert (TARGET_ALTIVEC);
13049 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
13050
13051  /* Floating point vector compare instructions use destination V4SImode.
13052     Move destination to appropriate mode later.  */
13053 if (dmode == V4SFmode)
13054 dest_mode = V4SImode;
13055 else
13056 dest_mode = dmode;
13057
13058 mask = gen_reg_rtx (dest_mode);
13059 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
13060
13061 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
13062 {
13063 bool swap_operands = false;
13064 bool try_again = false;
13065 switch (rcode)
13066 {
13067 case LT:
13068 rcode = GT;
13069 swap_operands = true;
13070 try_again = true;
13071 break;
13072 case LTU:
13073 rcode = GTU;
13074 swap_operands = true;
13075 try_again = true;
13076 break;
13077 case NE:
370df7db
JC
13078 case UNLE:
13079 case UNLT:
13080 case UNGE:
13081 case UNGT:
13082 /* Invert condition and try again.
13083 e.g., A != B becomes ~(A==B). */
21213b4c 13084 {
370df7db 13085 enum rtx_code rev_code;
21213b4c 13086 enum insn_code nor_code;
d1123cde 13087 rtx eq_rtx;
370df7db
JC
13088
13089 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
13090 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
13091 dest_mode);
94ff898d 13092
166cdb08 13093 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 13094 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
13095 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
13096
13097 if (dmode != dest_mode)
13098 {
13099 rtx temp = gen_reg_rtx (dest_mode);
13100 convert_move (temp, mask, 0);
13101 return temp;
13102 }
13103 return mask;
13104 }
13105 break;
13106 case GE:
13107 case GEU:
13108 case LE:
13109 case LEU:
13110 /* Try GT/GTU/LT/LTU OR EQ */
13111 {
13112 rtx c_rtx, eq_rtx;
13113 enum insn_code ior_code;
13114 enum rtx_code new_code;
13115
37409796
NS
13116 switch (rcode)
13117 {
13118 case GE:
13119 new_code = GT;
13120 break;
13121
13122 case GEU:
13123 new_code = GTU;
13124 break;
13125
13126 case LE:
13127 new_code = LT;
13128 break;
13129
13130 case LEU:
13131 new_code = LTU;
13132 break;
13133
13134 default:
13135 gcc_unreachable ();
13136 }
21213b4c
DP
13137
13138 c_rtx = rs6000_emit_vector_compare (new_code,
13139 op0, op1, dest_mode);
13140 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
13141 dest_mode);
13142
166cdb08 13143 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 13144 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
13145 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
13146 if (dmode != dest_mode)
13147 {
13148 rtx temp = gen_reg_rtx (dest_mode);
13149 convert_move (temp, mask, 0);
13150 return temp;
13151 }
13152 return mask;
13153 }
13154 break;
13155 default:
37409796 13156 gcc_unreachable ();
21213b4c
DP
13157 }
13158
13159 if (try_again)
13160 {
13161 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
13162 /* You only get two chances. */
13163 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
13164 }
13165
13166 if (swap_operands)
13167 {
13168 rtx tmp;
13169 tmp = op0;
13170 op0 = op1;
13171 op1 = tmp;
13172 }
13173 }
13174
915167f5
GK
13175 emit_insn (gen_rtx_SET (VOIDmode, mask,
13176 gen_rtx_UNSPEC (dest_mode,
13177 gen_rtvec (2, op0, op1),
13178 vec_cmp_insn)));
21213b4c
DP
13179 if (dmode != dest_mode)
13180 {
13181 rtx temp = gen_reg_rtx (dest_mode);
13182 convert_move (temp, mask, 0);
13183 return temp;
13184 }
13185 return mask;
13186}
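/* Illustrative summary of the fallbacks above: a V4SI LT is emitted as GT
   with the operands swapped, NE as the one's complement of the EQ compare,
   and LE/GE as (LT OR EQ) / (GT OR EQ) built from two compares.  */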
13187
13188/* Return vector select instruction for MODE.  Return INSN_NOT_AVAILABLE if
13189   a valid insn doesn't exist for the given mode.  */
13190
13191static int
13192get_vsel_insn (enum machine_mode mode)
13193{
13194 switch (mode)
13195 {
13196 case V4SImode:
13197 return UNSPEC_VSEL4SI;
13198 break;
13199 case V4SFmode:
13200 return UNSPEC_VSEL4SF;
13201 break;
13202 case V8HImode:
13203 return UNSPEC_VSEL8HI;
13204 break;
13205 case V16QImode:
13206 return UNSPEC_VSEL16QI;
13207 break;
13208 default:
13209 return INSN_NOT_AVAILABLE;
13210 break;
13211 }
13212 return INSN_NOT_AVAILABLE;
13213}
13214
13215/* Emit vector select insn where DEST is destination using
13216 operands OP1, OP2 and MASK. */
13217
13218static void
13219rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
13220{
13221 rtx t, temp;
13222 enum machine_mode dest_mode = GET_MODE (dest);
13223 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
13224
13225 temp = gen_reg_rtx (dest_mode);
94ff898d 13226
bb8df8a6 13227  /* For each vector element, select op1 when the mask is 1; otherwise
19f1ebc7 13228     select op2.  */
915167f5
GK
13229 t = gen_rtx_SET (VOIDmode, temp,
13230 gen_rtx_UNSPEC (dest_mode,
13231 gen_rtvec (3, op2, op1, mask),
13232 vsel_insn_index));
21213b4c
DP
13233 emit_insn (t);
13234 emit_move_insn (dest, temp);
13235 return;
13236}
13237
94ff898d 13238/* Emit vector conditional expression.
21213b4c
DP
13239 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
13240 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
13241
13242int
13243rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
13244 rtx cond, rtx cc_op0, rtx cc_op1)
13245{
13246 enum machine_mode dest_mode = GET_MODE (dest);
13247 enum rtx_code rcode = GET_CODE (cond);
13248 rtx mask;
13249
13250 if (!TARGET_ALTIVEC)
13251 return 0;
13252
13253 /* Get the vector mask for the given relational operations. */
13254 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
13255
13256 rs6000_emit_vector_select (dest, op1, op2, mask);
13257
13258 return 1;
13259}
13260
50a0b056
GK
13261/* Emit a conditional move: move TRUE_COND to DEST if OP of the
13262 operands of the last comparison is nonzero/true, FALSE_COND if it
13263 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 13264
50a0b056 13265int
a2369ed3 13266rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
13267{
13268 enum rtx_code code = GET_CODE (op);
13269 rtx op0 = rs6000_compare_op0;
13270 rtx op1 = rs6000_compare_op1;
13271 REAL_VALUE_TYPE c1;
3148ad6d
DJ
13272 enum machine_mode compare_mode = GET_MODE (op0);
13273 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 13274 rtx temp;
add2402e 13275 bool is_against_zero;
50a0b056 13276
a3c9585f 13277 /* These modes should always match. */
a3170dc6
AH
13278 if (GET_MODE (op1) != compare_mode
13279 /* In the isel case however, we can use a compare immediate, so
13280 op1 may be a small constant. */
13281 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 13282 return 0;
178c3eff 13283 if (GET_MODE (true_cond) != result_mode)
3148ad6d 13284 return 0;
178c3eff 13285 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
13286 return 0;
13287
50a0b056 13288 /* First, work out if the hardware can do this at all, or
a3c9585f 13289 if it's too slow.... */
50a0b056 13290 if (! rs6000_compare_fp_p)
a3170dc6
AH
13291 {
13292 if (TARGET_ISEL)
13293 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13294 return 0;
13295 }
8ef65e3d 13296 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 13297 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 13298 return 0;
50a0b056 13299
add2402e 13300 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 13301
add2402e
GK
13302 /* A floating-point subtract might overflow, underflow, or produce
13303 an inexact result, thus changing the floating-point flags, so it
13304 can't be generated if we care about that. It's safe if one side
13305 of the construct is zero, since then no subtract will be
13306 generated. */
ebb109ad 13307 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
13308 && flag_trapping_math && ! is_against_zero)
13309 return 0;
13310
50a0b056
GK
13311 /* Eliminate half of the comparisons by switching operands, this
13312 makes the remaining code simpler. */
13313 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 13314 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
13315 {
13316 code = reverse_condition_maybe_unordered (code);
13317 temp = true_cond;
13318 true_cond = false_cond;
13319 false_cond = temp;
13320 }
13321
13322 /* UNEQ and LTGT take four instructions for a comparison with zero,
13323 it'll probably be faster to use a branch here too. */
bc9ec0e0 13324 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 13325 return 0;
f676971a 13326
50a0b056
GK
13327 if (GET_CODE (op1) == CONST_DOUBLE)
13328 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 13329
b6d08ca1 13330 /* We're going to try to implement comparisons by performing
13331 a subtract, then comparing against zero. Unfortunately,
13332 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 13333 know that the operand is finite and the comparison
50a0b056 13334 would treat EQ different to UNORDERED, we can't do it. */
bc9ec0e0 13335 if (HONOR_INFINITIES (compare_mode)
50a0b056 13336 && code != GT && code != UNGE
045572c7 13337 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
13338 /* Constructs of the form (a OP b ? a : b) are safe. */
13339 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 13340 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
13341 && ! rtx_equal_p (op1, true_cond))))
13342 return 0;
add2402e 13343
50a0b056
GK
13344 /* At this point we know we can use fsel. */
13345
13346 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
13347 if (! is_against_zero)
13348 {
13349 temp = gen_reg_rtx (compare_mode);
13350 emit_insn (gen_rtx_SET (VOIDmode, temp,
13351 gen_rtx_MINUS (compare_mode, op0, op1)));
13352 op0 = temp;
13353 op1 = CONST0_RTX (compare_mode);
13354 }
50a0b056
GK
13355
13356 /* If we don't care about NaNs we can reduce some of the comparisons
13357 down to faster ones. */
bc9ec0e0 13358 if (! HONOR_NANS (compare_mode))
50a0b056
GK
13359 switch (code)
13360 {
13361 case GT:
13362 code = LE;
13363 temp = true_cond;
13364 true_cond = false_cond;
13365 false_cond = temp;
13366 break;
13367 case UNGE:
13368 code = GE;
13369 break;
13370 case UNEQ:
13371 code = EQ;
13372 break;
13373 default:
13374 break;
13375 }
13376
13377 /* Now, reduce everything down to a GE. */
13378 switch (code)
13379 {
13380 case GE:
13381 break;
13382
13383 case LE:
3148ad6d
DJ
13384 temp = gen_reg_rtx (compare_mode);
13385 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13386 op0 = temp;
13387 break;
13388
13389 case ORDERED:
3148ad6d
DJ
13390 temp = gen_reg_rtx (compare_mode);
13391 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
13392 op0 = temp;
13393 break;
13394
13395 case EQ:
3148ad6d 13396 temp = gen_reg_rtx (compare_mode);
f676971a 13397 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
13398 gen_rtx_NEG (compare_mode,
13399 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
13400 op0 = temp;
13401 break;
13402
13403 case UNGE:
bc9ec0e0 13404 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 13405 temp = gen_reg_rtx (result_mode);
50a0b056 13406 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 13407 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13408 gen_rtx_GE (VOIDmode,
13409 op0, op1),
13410 true_cond, false_cond)));
bc9ec0e0
GK
13411 false_cond = true_cond;
13412 true_cond = temp;
50a0b056 13413
3148ad6d
DJ
13414 temp = gen_reg_rtx (compare_mode);
13415 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13416 op0 = temp;
13417 break;
13418
13419 case GT:
bc9ec0e0 13420 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 13421 temp = gen_reg_rtx (result_mode);
50a0b056 13422 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 13423 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13424 gen_rtx_GE (VOIDmode,
13425 op0, op1),
13426 true_cond, false_cond)));
bc9ec0e0
GK
13427 true_cond = false_cond;
13428 false_cond = temp;
50a0b056 13429
3148ad6d
DJ
13430 temp = gen_reg_rtx (compare_mode);
13431 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13432 op0 = temp;
13433 break;
13434
13435 default:
37409796 13436 gcc_unreachable ();
50a0b056
GK
13437 }
13438
13439 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 13440 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13441 gen_rtx_GE (VOIDmode,
13442 op0, op1),
13443 true_cond, false_cond)));
13444 return 1;
13445}
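/* Illustrative sketch (not part of the original source): the reductions
   above turn every supported comparison into a GE test against zero so
   that it maps onto fsel, which computes FRT = (FRA >= 0.0) ? FRC : FRB.
   With hypothetical register assignments, a conditional move such as

       d = (a >= b) ? x : y;

   can come out roughly as

       fsub  f0, f1, f2        # f0 = a - b      (a in f1, b in f2)
       fsel  f3, f0, f4, f5    # d  = (f0 >= 0.0) ? x : y  (x in f4, y in f5)

   which is only valid when the subtract cannot trap and the comparison
   does not need to distinguish EQ from UNORDERED, exactly the conditions
   checked earlier in this function.  */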
13446
a3170dc6
AH
13447/* Same as above, but for ints (isel). */
13448
13449static int
a2369ed3 13450rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
13451{
13452 rtx condition_rtx, cr;
13453
 13454	  /* All isel implementations thus far are 32-bit.  */
13455 if (GET_MODE (rs6000_compare_op0) != SImode)
13456 return 0;
13457
13458 /* We still have to do the compare, because isel doesn't do a
13459 compare, it just looks at the CRx bits set by a previous compare
13460 instruction. */
13461 condition_rtx = rs6000_generate_compare (GET_CODE (op));
13462 cr = XEXP (condition_rtx, 0);
13463
13464 if (GET_MODE (cr) == CCmode)
13465 emit_insn (gen_isel_signed (dest, condition_rtx,
13466 true_cond, false_cond, cr));
13467 else
13468 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13469 true_cond, false_cond, cr));
13470
13471 return 1;
13472}
13473
13474const char *
a2369ed3 13475output_isel (rtx *operands)
a3170dc6
AH
13476{
13477 enum rtx_code code;
13478
13479 code = GET_CODE (operands[1]);
13480 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13481 {
13482 PUT_CODE (operands[1], reverse_condition (code));
13483 return "isel %0,%3,%2,%j1";
13484 }
13485 else
13486 return "isel %0,%2,%3,%j1";
13487}
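/* Illustrative sketch (not part of the original source): isel selects
   between two GPRs on a single CR bit, rD = CRbit ? rA : rB.  Because a
   comparison gives only LT, GT, and EQ bits to test, conditions such as
   GE or NE are handled above by reversing the test and swapping the two
   source operands.  With hypothetical registers, r3 = (r4 > r5) ? r6 : r7
   might be emitted as

       cmpw  cr0, r4, r5
       isel  r3, r6, r7, 1     # bit 1 of cr0 is the GT bit

   while r3 = (r4 >= r5) ? r6 : r7 would instead test the LT bit (bit 0)
   with r6 and r7 swapped.  */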
13488
50a0b056 13489void
a2369ed3 13490rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13491{
13492 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13493 enum rtx_code c;
50a0b056 13494 rtx target;
5dc8d536
AH
13495
13496 if (code == SMAX || code == SMIN)
13497 c = GE;
13498 else
13499 c = GEU;
13500
50a0b056 13501 if (code == SMAX || code == UMAX)
f676971a 13502 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13503 op0, op1, mode, 0);
13504 else
f676971a 13505 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13506 op1, op0, mode, 0);
37409796 13507 gcc_assert (target);
50a0b056
GK
13508 if (target != dest)
13509 emit_move_insn (dest, target);
13510}
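/* Illustrative note (not part of the original source): rs6000_emit_minmax
   simply rephrases min/max as a conditional move, e.g. a signed maximum
   becomes

       dest = (op0 >= op1) ? op0 : op1;

   with comparison code GE (GEU for the unsigned variants), and then lets
   emit_conditional_move pick the fsel or isel expansion above as
   appropriate for the mode.  */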
46c07df8 13511
915167f5
GK
13512/* Emit instructions to perform a load-reserved/store-conditional operation.
13513 The operation performed is an atomic
13514 (set M (CODE:MODE M OP))
13515 If not NULL, BEFORE is atomically set to M before the operation, and
13516 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13517 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13518 Either OP or M may be wrapped in a NOT operation. */
13519
13520void
13521rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13522 rtx m, rtx op, rtx before_param, rtx after_param,
13523 bool sync_p)
13524{
13525 enum machine_mode used_mode;
13526 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13527 rtx used_m;
13528 rtvec vec;
13529 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13530 rtx shift = NULL_RTX;
bb8df8a6 13531
915167f5
GK
13532 if (sync_p)
13533 emit_insn (gen_memory_barrier ());
bb8df8a6 13534
915167f5
GK
13535 if (GET_CODE (m) == NOT)
13536 used_m = XEXP (m, 0);
13537 else
13538 used_m = m;
13539
13540 /* If this is smaller than SImode, we'll have to use SImode with
13541 adjustments. */
13542 if (mode == QImode || mode == HImode)
13543 {
13544 rtx newop, oldop;
13545
13546 if (MEM_ALIGN (used_m) >= 32)
13547 {
13548 int ishift = 0;
13549 if (BYTES_BIG_ENDIAN)
13550 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13551
915167f5 13552 shift = GEN_INT (ishift);
c75c6d11 13553 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13554 }
13555 else
13556 {
13557 rtx addrSI, aligned_addr;
a9c9d3fa 13558 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13559
c75c6d11
JJ
13560 addrSI = gen_lowpart_common (SImode,
13561 force_reg (Pmode, XEXP (used_m, 0)));
13562 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13563 shift = gen_reg_rtx (SImode);
13564
13565 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13566 GEN_INT (shift_mask)));
13567 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
13568
13569 aligned_addr = expand_binop (Pmode, and_optab,
13570 XEXP (used_m, 0),
13571 GEN_INT (-4), NULL_RTX,
13572 1, OPTAB_LIB_WIDEN);
13573 used_m = change_address (used_m, SImode, aligned_addr);
13574 set_mem_align (used_m, 32);
915167f5 13575 }
c75c6d11
JJ
13576 /* It's safe to keep the old alias set of USED_M, because
13577 the operation is atomic and only affects the original
13578 USED_M. */
13579 if (GET_CODE (m) == NOT)
13580 m = gen_rtx_NOT (SImode, used_m);
13581 else
13582 m = used_m;
915167f5
GK
13583
13584 if (GET_CODE (op) == NOT)
13585 {
13586 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13587 oldop = gen_rtx_NOT (SImode, oldop);
13588 }
13589 else
13590 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13591
915167f5
GK
13592 switch (code)
13593 {
13594 case IOR:
13595 case XOR:
13596 newop = expand_binop (SImode, and_optab,
13597 oldop, GEN_INT (imask), NULL_RTX,
13598 1, OPTAB_LIB_WIDEN);
13599 emit_insn (gen_ashlsi3 (newop, newop, shift));
13600 break;
13601
13602 case AND:
13603 newop = expand_binop (SImode, ior_optab,
13604 oldop, GEN_INT (~imask), NULL_RTX,
13605 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13606 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13607 break;
13608
13609 case PLUS:
9f0076e5 13610 case MINUS:
915167f5
GK
13611 {
13612 rtx mask;
bb8df8a6 13613
915167f5
GK
13614 newop = expand_binop (SImode, and_optab,
13615 oldop, GEN_INT (imask), NULL_RTX,
13616 1, OPTAB_LIB_WIDEN);
13617 emit_insn (gen_ashlsi3 (newop, newop, shift));
13618
13619 mask = gen_reg_rtx (SImode);
13620 emit_move_insn (mask, GEN_INT (imask));
13621 emit_insn (gen_ashlsi3 (mask, mask, shift));
13622
9f0076e5
DE
13623 if (code == PLUS)
13624 newop = gen_rtx_PLUS (SImode, m, newop);
13625 else
13626 newop = gen_rtx_MINUS (SImode, m, newop);
13627 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13628 newop = gen_rtx_IOR (SImode, newop,
13629 gen_rtx_AND (SImode,
13630 gen_rtx_NOT (SImode, mask),
13631 m));
13632 break;
13633 }
13634
13635 default:
13636 gcc_unreachable ();
13637 }
13638
a9c9d3fa
GK
13639 if (GET_CODE (m) == NOT)
13640 {
13641 rtx mask, xorm;
13642
13643 mask = gen_reg_rtx (SImode);
13644 emit_move_insn (mask, GEN_INT (imask));
13645 emit_insn (gen_ashlsi3 (mask, mask, shift));
13646
13647 xorm = gen_rtx_XOR (SImode, used_m, mask);
 13648	      /* Depending on the value of 'op', the XOR or the operation may
 13649		 be simplified away.  */
13650 newop = simplify_gen_binary (code, SImode, xorm, newop);
13651 }
915167f5
GK
13652 op = newop;
13653 used_mode = SImode;
13654 before = gen_reg_rtx (used_mode);
13655 after = gen_reg_rtx (used_mode);
13656 }
13657 else
13658 {
13659 used_mode = mode;
13660 before = before_param;
13661 after = after_param;
13662
13663 if (before == NULL_RTX)
13664 before = gen_reg_rtx (used_mode);
13665 if (after == NULL_RTX)
13666 after = gen_reg_rtx (used_mode);
13667 }
bb8df8a6 13668
9f0076e5
DE
13669 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
13670 && used_mode != mode)
915167f5
GK
13671 the_op = op; /* Computed above. */
13672 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13673 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
13674 else
13675 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13676
13677 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13678 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13679 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
13680 gen_rtx_UNSPEC (used_mode,
13681 gen_rtvec (1, the_op),
13682 UNSPEC_SYNC_OP));
915167f5
GK
13683 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
13684
9f0076e5 13685 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
13686 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
13687 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
13688 else
13689 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
13690 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
13691
13692 /* Shift and mask the return values properly. */
13693 if (used_mode != mode && before_param)
13694 {
13695 emit_insn (gen_lshrsi3 (before, before, shift));
13696 convert_move (before_param, before, 1);
13697 }
13698
13699 if (used_mode != mode && after_param)
13700 {
13701 emit_insn (gen_lshrsi3 (after, after, shift));
13702 convert_move (after_param, after, 1);
13703 }
13704
13705 /* The previous sequence will end with a branch that's dependent on
13706 the conditional store, so placing an isync will ensure that no
13707 other instructions (especially, no load or store instructions)
13708 can start before the atomic operation completes. */
13709 if (sync_p)
13710 emit_insn (gen_isync ());
13711}
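/* Illustrative sketch (not part of the original source): because lwarx and
   stwcx. operate on aligned words, the QImode/HImode path above performs
   the operation on the byte or halfword inside its containing word.  For a
   QImode operand on this big-endian target the shift computed above is
   roughly

       shift  = (addr << 3) & 0x18;    # bit offset of the byte in the word
       shift ^= 0x18;                  # byte 0 is the most significant byte

   after which the operand and a 0xff mask are shifted into position, the
   aligned SImode word is updated by the load-reserved/store-conditional
   sequence, and the result is shifted back down for BEFORE/AFTER.  */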
13712
b52110d4
DE
13713/* A subroutine of the atomic operation splitters. Jump to LABEL if
13714 COND is true. Mark the jump as unlikely to be taken. */
13715
13716static void
13717emit_unlikely_jump (rtx cond, rtx label)
13718{
13719 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
13720 rtx x;
13721
13722 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
13723 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
13724 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
13725}
13726
13727/* A subroutine of the atomic operation splitters. Emit a load-locked
13728 instruction in MODE. */
13729
13730static void
13731emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
13732{
13733 rtx (*fn) (rtx, rtx) = NULL;
13734 if (mode == SImode)
13735 fn = gen_load_locked_si;
13736 else if (mode == DImode)
13737 fn = gen_load_locked_di;
13738 emit_insn (fn (reg, mem));
13739}
13740
13741/* A subroutine of the atomic operation splitters. Emit a store-conditional
13742 instruction in MODE. */
13743
13744static void
13745emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
13746{
13747 rtx (*fn) (rtx, rtx, rtx) = NULL;
13748 if (mode == SImode)
13749 fn = gen_store_conditional_si;
13750 else if (mode == DImode)
13751 fn = gen_store_conditional_di;
13752
9f0076e5 13753 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
13754 if (PPC405_ERRATUM77)
13755 emit_insn (gen_memory_barrier ());
13756
13757 emit_insn (fn (res, mem, val));
13758}
13759
ea2c620c 13760/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 13761 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
13762 operand of the binary operator. BEFORE and AFTER are optional locations to
 13763	   return the value of MEM either before or after the operation.  SCRATCH is
13764 a scratch register. */
13765
13766void
13767rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
13768 rtx before, rtx after, rtx scratch)
13769{
13770 enum machine_mode mode = GET_MODE (mem);
13771 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13772
13773 emit_insn (gen_memory_barrier ());
13774
13775 label = gen_label_rtx ();
13776 emit_label (label);
13777 label = gen_rtx_LABEL_REF (VOIDmode, label);
13778
13779 if (before == NULL_RTX)
13780 before = scratch;
13781 emit_load_locked (mode, before, mem);
13782
13783 if (code == NOT)
13784 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
13785 else if (code == AND)
13786 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
13787 else
13788 x = gen_rtx_fmt_ee (code, mode, before, val);
13789
13790 if (after != NULL_RTX)
13791 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
13792 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
13793
13794 emit_store_conditional (mode, cond, mem, scratch);
13795
13796 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13797 emit_unlikely_jump (x, label);
13798
13799 emit_insn (gen_isync ());
13800}
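/* Illustrative sketch (not part of the original source): for an SImode
   fetch-and-add the splitter above produces the classic reservation loop,
   roughly (register numbers hypothetical):

       sync                     # barrier before the atomic region
    1: lwarx   r9, 0, r3        # load word and reserve
       add     r0, r9, r4       # apply the operation
       stwcx.  r0, 0, r3        # store iff the reservation still holds
       bne-    1b               # lost the reservation, retry
       isync                    # barrier after the atomic region

   with r9 holding the BEFORE value and r0 the AFTER value.  DImode uses
   ldarx/stdcx. instead.  */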
13801
b52110d4
DE
13802/* Expand an atomic compare and swap operation. MEM is the memory on which
13803 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
13804 value to be stored. SCRATCH is a scratch GPR. */
13805
13806void
13807rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
13808 rtx scratch)
13809{
13810 enum machine_mode mode = GET_MODE (mem);
13811 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13812
13813 emit_insn (gen_memory_barrier ());
13814
13815 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13816 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13817 emit_label (XEXP (label1, 0));
13818
13819 emit_load_locked (mode, retval, mem);
13820
13821 x = gen_rtx_COMPARE (CCmode, retval, oldval);
13822 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13823
13824 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13825 emit_unlikely_jump (x, label2);
13826
13827 emit_move_insn (scratch, newval);
13828 emit_store_conditional (mode, cond, mem, scratch);
13829
13830 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13831 emit_unlikely_jump (x, label1);
13832
13833 emit_insn (gen_isync ());
13834 emit_label (XEXP (label2, 0));
13835}
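/* Illustrative sketch (not part of the original source): the
   compare-and-swap splitter above expands to roughly (hypothetical
   registers; r3 = address, r4 = oldval, r5 = newval, r9 = retval):

       sync
    1: lwarx   r9, 0, r3        # retval = *mem
       cmpw    cr0, r9, r4      # does it match oldval?
       bne-    2f               # no: fail, skip the store
       stwcx.  r5, 0, r3        # yes: try to store newval
       bne-    1b               # reservation lost, retry
       isync
    2:

   Note that the failure branch jumps past the isync, so the barrier is
   only executed on the success path.  */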
13836
13837/* Expand an atomic test and set operation. MEM is the memory on which
13838 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
13839
13840void
13841rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
13842{
13843 enum machine_mode mode = GET_MODE (mem);
13844 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13845
13846 emit_insn (gen_memory_barrier ());
13847
13848 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13849 emit_label (XEXP (label, 0));
13850
13851 emit_load_locked (mode, retval, mem);
13852 emit_move_insn (scratch, val);
13853 emit_store_conditional (mode, cond, mem, scratch);
13854
13855 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13856 emit_unlikely_jump (x, label);
13857
13858 emit_insn (gen_isync ());
13859}
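/* Illustrative sketch (not part of the original source): the test-and-set
   (atomic exchange) splitter above becomes roughly

       sync
    1: lwarx   r9, 0, r3        # retval = *mem
       stwcx.  r4, 0, r3        # *mem = val
       bne-    1b
       isync

   i.e. an unconditional exchange rather than a full read-modify-write.  */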
13860
9fc75b97
DE
13861void
13862rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
13863{
13864 enum machine_mode mode = GET_MODE (mem);
13865 rtx addrSI, align, wdst, shift, mask;
13866 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
13867 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13868
13869 /* Shift amount for subword relative to aligned word. */
13870 addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
13871 shift = gen_reg_rtx (SImode);
13872 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
13873 GEN_INT (shift_mask)));
13874 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
13875
13876 /* Shift and mask old value into position within word. */
13877 oldval = convert_modes (SImode, mode, oldval, 1);
13878 oldval = expand_binop (SImode, and_optab,
13879 oldval, GEN_INT (imask), NULL_RTX,
13880 1, OPTAB_LIB_WIDEN);
13881 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
13882
13883 /* Shift and mask new value into position within word. */
13884 newval = convert_modes (SImode, mode, newval, 1);
13885 newval = expand_binop (SImode, and_optab,
13886 newval, GEN_INT (imask), NULL_RTX,
13887 1, OPTAB_LIB_WIDEN);
13888 emit_insn (gen_ashlsi3 (newval, newval, shift));
13889
13890 /* Mask for insertion. */
13891 mask = gen_reg_rtx (SImode);
13892 emit_move_insn (mask, GEN_INT (imask));
13893 emit_insn (gen_ashlsi3 (mask, mask, shift));
13894
13895 /* Address of aligned word containing subword. */
13896 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
13897 NULL_RTX, 1, OPTAB_LIB_WIDEN);
13898 mem = change_address (mem, SImode, align);
13899 set_mem_align (mem, 32);
13900 MEM_VOLATILE_P (mem) = 1;
13901
13902 wdst = gen_reg_rtx (SImode);
13903 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
13904 oldval, newval, mem));
13905
13906 emit_move_insn (dst, gen_lowpart (mode, wdst));
13907}
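/* Illustrative sketch (not part of the original source): for a QImode
   compare-and-swap the expansion above reduces the problem to an SImode
   operation on the containing aligned word, roughly

       shift  = ((addr << 3) & 0x18) ^ 0x18;  # big-endian bit offset of byte
       old_w  = (oldval & 0xff) << shift;
       new_w  = (newval & 0xff) << shift;
       mask   = 0xff << shift;
       word   = addr & -4;                    # aligned word holding the byte

   and then lets sync_compare_and_swapqhi_internal loop on the word while
   comparing only the bits under MASK.  HImode is the same with a 0x10
   shift mask and a 0xffff operand mask.  */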
13908
13909void
13910rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
13911 rtx oldval, rtx newval, rtx mem,
13912 rtx scratch)
13913{
13914 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13915
13916 emit_insn (gen_memory_barrier ());
13917 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13918 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13919 emit_label (XEXP (label1, 0));
13920
13921 emit_load_locked (SImode, scratch, mem);
13922
13923 /* Mask subword within loaded value for comparison with oldval.
 13924	     Use UNSPEC_AND to avoid a clobber.  */
13925 emit_insn (gen_rtx_SET (SImode, dest,
13926 gen_rtx_UNSPEC (SImode,
13927 gen_rtvec (2, scratch, mask),
13928 UNSPEC_AND)));
13929
13930 x = gen_rtx_COMPARE (CCmode, dest, oldval);
13931 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13932
13933 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13934 emit_unlikely_jump (x, label2);
13935
13936 /* Clear subword within loaded value for insertion of new value. */
13937 emit_insn (gen_rtx_SET (SImode, scratch,
13938 gen_rtx_AND (SImode,
13939 gen_rtx_NOT (SImode, mask), scratch)));
13940 emit_insn (gen_iorsi3 (scratch, scratch, newval));
13941 emit_store_conditional (SImode, cond, mem, scratch);
13942
13943 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13944 emit_unlikely_jump (x, label1);
13945
13946 emit_insn (gen_isync ());
13947 emit_label (XEXP (label2, 0));
13948}
13949
13950
b52110d4 13951 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
13952 multi-register moves. It will emit at most one instruction for
13953 each register that is accessed; that is, it won't emit li/lis pairs
13954 (or equivalent for 64-bit code). One of SRC or DST must be a hard
13955 register. */
46c07df8 13956
46c07df8 13957void
a9baceb1 13958rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 13959{
a9baceb1
GK
13960 /* The register number of the first register being moved. */
13961 int reg;
13962 /* The mode that is to be moved. */
13963 enum machine_mode mode;
13964 /* The mode that the move is being done in, and its size. */
13965 enum machine_mode reg_mode;
13966 int reg_mode_size;
13967 /* The number of registers that will be moved. */
13968 int nregs;
13969
13970 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
13971 mode = GET_MODE (dst);
c8b622ff 13972 nregs = hard_regno_nregs[reg][mode];
a9baceb1 13973 if (FP_REGNO_P (reg))
7393f7f8 13974 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13975 else if (ALTIVEC_REGNO_P (reg))
13976 reg_mode = V16QImode;
4d4447b5
PB
13977 else if (TARGET_E500_DOUBLE && (mode == TFmode || mode == TDmode))
13978 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13979 else
13980 reg_mode = word_mode;
13981 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 13982
37409796 13983 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 13984
a9baceb1
GK
13985 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
13986 {
13987 /* Move register range backwards, if we might have destructive
13988 overlap. */
13989 int i;
13990 for (i = nregs - 1; i >= 0; i--)
f676971a 13991 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
13992 simplify_gen_subreg (reg_mode, dst, mode,
13993 i * reg_mode_size),
13994 simplify_gen_subreg (reg_mode, src, mode,
13995 i * reg_mode_size)));
13996 }
46c07df8
HP
13997 else
13998 {
a9baceb1
GK
13999 int i;
14000 int j = -1;
14001 bool used_update = false;
46c07df8 14002
c1e55850 14003 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
14004 {
14005 rtx breg;
3a1f863f 14006
a9baceb1
GK
14007 if (GET_CODE (XEXP (src, 0)) == PRE_INC
14008 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
14009 {
14010 rtx delta_rtx;
a9baceb1 14011 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
14012 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
14013 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
14014 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
14015 emit_insn (TARGET_32BIT
14016 ? gen_addsi3 (breg, breg, delta_rtx)
14017 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14018 src = replace_equiv_address (src, breg);
3a1f863f 14019 }
d04b6e6e 14020 else if (! rs6000_offsettable_memref_p (src))
c1e55850 14021 {
13e2e16e 14022 rtx basereg;
c1e55850
GK
14023 basereg = gen_rtx_REG (Pmode, reg);
14024 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 14025 src = replace_equiv_address (src, basereg);
c1e55850 14026 }
3a1f863f 14027
0423421f
AM
14028 breg = XEXP (src, 0);
14029 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
14030 breg = XEXP (breg, 0);
14031
14032 /* If the base register we are using to address memory is
14033 also a destination reg, then change that register last. */
14034 if (REG_P (breg)
14035 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
14036 && REGNO (breg) < REGNO (dst) + nregs)
14037 j = REGNO (breg) - REGNO (dst);
c4ad648e 14038 }
46c07df8 14039
a9baceb1 14040 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
14041 {
14042 rtx breg;
14043
a9baceb1
GK
14044 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
14045 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
14046 {
14047 rtx delta_rtx;
a9baceb1 14048 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
14049 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
14050 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
14051 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
14052
14053 /* We have to update the breg before doing the store.
14054 Use store with update, if available. */
14055
14056 if (TARGET_UPDATE)
14057 {
a9baceb1 14058 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
14059 emit_insn (TARGET_32BIT
14060 ? (TARGET_POWERPC64
14061 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
14062 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
14063 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 14064 used_update = true;
3a1f863f
DE
14065 }
14066 else
a9baceb1
GK
14067 emit_insn (TARGET_32BIT
14068 ? gen_addsi3 (breg, breg, delta_rtx)
14069 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14070 dst = replace_equiv_address (dst, breg);
3a1f863f 14071 }
37409796 14072 else
d04b6e6e 14073 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
14074 }
14075
46c07df8 14076 for (i = 0; i < nregs; i++)
f676971a 14077 {
3a1f863f
DE
14078 /* Calculate index to next subword. */
14079 ++j;
f676971a 14080 if (j == nregs)
3a1f863f 14081 j = 0;
46c07df8 14082
112cdef5 14083	  /* If the compiler has already emitted the move of the first word by
a9baceb1 14084	     a store with update, there is no need to do anything.  */
3a1f863f 14085 if (j == 0 && used_update)
a9baceb1 14086 continue;
f676971a 14087
a9baceb1
GK
14088 emit_insn (gen_rtx_SET (VOIDmode,
14089 simplify_gen_subreg (reg_mode, dst, mode,
14090 j * reg_mode_size),
14091 simplify_gen_subreg (reg_mode, src, mode,
14092 j * reg_mode_size)));
3a1f863f 14093 }
46c07df8
HP
14094 }
14095}
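/* Illustrative sketch (not part of the original source): moves wider than
   one register are split into word-sized SETs.  With overlapping hard
   registers the order matters: copying a DImode value living in r3:r4
   into r4:r5 must emit, in this order,

       (set (reg:SI 5) (reg:SI 4))
       (set (reg:SI 4) (reg:SI 3))

   which is why the REGNO (src) < REGNO (dst) case above walks the
   registers backwards.  The MEM cases instead rewrite the address so that
   every word is reachable with an offsettable address, using a
   store-with-update for the first word when pre-increment addressing is
   involved.  */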
14096
12a4e8c5 14097\f
a4f6c312
SS
14098/* This page contains routines that are used to determine what the
14099 function prologue and epilogue code will do and write them out. */
9878760c 14100
a4f6c312
SS
14101/* Return the first fixed-point register that is required to be
14102 saved. 32 if none. */
9878760c
RK
14103
14104int
863d938c 14105first_reg_to_save (void)
9878760c
RK
14106{
14107 int first_reg;
14108
14109 /* Find lowest numbered live register. */
14110 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 14111 if (df_regs_ever_live_p (first_reg)
a38d360d 14112 && (! call_used_regs[first_reg]
1db02437 14113 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14114 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
14115 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
14116 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
14117 break;
14118
ee890fe2 14119#if TARGET_MACHO
93638d7a
AM
14120 if (flag_pic
14121 && current_function_uses_pic_offset_table
14122 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 14123 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
14124#endif
14125
9878760c
RK
14126 return first_reg;
14127}
14128
14129/* Similar, for FP regs. */
14130
14131int
863d938c 14132first_fp_reg_to_save (void)
9878760c
RK
14133{
14134 int first_reg;
14135
14136 /* Find lowest numbered live register. */
14137 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 14138 if (df_regs_ever_live_p (first_reg))
9878760c
RK
14139 break;
14140
14141 return first_reg;
14142}
00b960c7
AH
14143
14144/* Similar, for AltiVec regs. */
14145
14146static int
863d938c 14147first_altivec_reg_to_save (void)
00b960c7
AH
14148{
14149 int i;
14150
14151 /* Stack frame remains as is unless we are in AltiVec ABI. */
14152 if (! TARGET_ALTIVEC_ABI)
14153 return LAST_ALTIVEC_REGNO + 1;
14154
22fa69da 14155 /* On Darwin, the unwind routines are compiled without
982afe02 14156 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
14157 altivec registers when necessary. */
14158 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
14159 && ! TARGET_ALTIVEC)
14160 return FIRST_ALTIVEC_REGNO + 20;
14161
00b960c7
AH
14162 /* Find lowest numbered live register. */
14163 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14164 if (df_regs_ever_live_p (i))
00b960c7
AH
14165 break;
14166
14167 return i;
14168}
14169
14170/* Return a 32-bit mask of the AltiVec registers we need to set in
14171 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
14172 the 32-bit word is 0. */
14173
14174static unsigned int
863d938c 14175compute_vrsave_mask (void)
00b960c7
AH
14176{
14177 unsigned int i, mask = 0;
14178
22fa69da 14179 /* On Darwin, the unwind routines are compiled without
982afe02 14180 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
14181 call-saved altivec registers when necessary. */
14182 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
14183 && ! TARGET_ALTIVEC)
14184 mask |= 0xFFF;
14185
00b960c7
AH
14186 /* First, find out if we use _any_ altivec registers. */
14187 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14188 if (df_regs_ever_live_p (i))
00b960c7
AH
14189 mask |= ALTIVEC_REG_BIT (i);
14190
14191 if (mask == 0)
14192 return mask;
14193
00b960c7
AH
14194 /* Next, remove the argument registers from the set. These must
14195 be in the VRSAVE mask set by the caller, so we don't need to add
14196 them in again. More importantly, the mask we compute here is
14197 used to generate CLOBBERs in the set_vrsave insn, and we do not
14198 wish the argument registers to die. */
a6cf80f2 14199 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
14200 mask &= ~ALTIVEC_REG_BIT (i);
14201
14202 /* Similarly, remove the return value from the set. */
14203 {
14204 bool yes = false;
14205 diddle_return_value (is_altivec_return_reg, &yes);
14206 if (yes)
14207 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
14208 }
14209
14210 return mask;
14211}
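/* Illustrative note (not part of the original source): in the VRSAVE mask
   the most significant bit corresponds to V0, so a function whose only
   live vector registers are V20 and V21 yields a mask of 0x00000c00
   (bits 31-20 and 31-21).  The Darwin save-world case above ORs in 0xFFF,
   which in this encoding is exactly the call-saved range V20..V31.  */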
14212
d62294f5 14213/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
14214 size of prologues/epilogues by calling our own save/restore-the-world
14215 routines. */
d62294f5
FJ
14216
14217static void
f57fe068
AM
14218compute_save_world_info (rs6000_stack_t *info_ptr)
14219{
14220 info_ptr->world_save_p = 1;
14221 info_ptr->world_save_p
14222 = (WORLD_SAVE_P (info_ptr)
14223 && DEFAULT_ABI == ABI_DARWIN
14224 && ! (current_function_calls_setjmp && flag_exceptions)
14225 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
14226 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
14227 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
14228 && info_ptr->cr_save_p);
f676971a 14229
d62294f5
FJ
14230 /* This will not work in conjunction with sibcalls. Make sure there
14231 are none. (This check is expensive, but seldom executed.) */
f57fe068 14232 if (WORLD_SAVE_P (info_ptr))
f676971a 14233 {
d62294f5
FJ
14234 rtx insn;
14235 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
14236 if ( GET_CODE (insn) == CALL_INSN
14237 && SIBLING_CALL_P (insn))
14238 {
14239 info_ptr->world_save_p = 0;
14240 break;
14241 }
d62294f5 14242 }
f676971a 14243
f57fe068 14244 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
14245 {
14246 /* Even if we're not touching VRsave, make sure there's room on the
14247 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 14248 will attempt to save it. */
d62294f5
FJ
14249 info_ptr->vrsave_size = 4;
14250
14251 /* "Save" the VRsave register too if we're saving the world. */
14252 if (info_ptr->vrsave_mask == 0)
c4ad648e 14253 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
14254
14255 /* Because the Darwin register save/restore routines only handle
c4ad648e 14256 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 14257 check. */
37409796
NS
14258 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14259 && (info_ptr->first_altivec_reg_save
14260 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 14261 }
f676971a 14262 return;
d62294f5
FJ
14263}
14264
14265
00b960c7 14266static void
a2369ed3 14267is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
14268{
14269 bool *yes = (bool *) xyes;
14270 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14271 *yes = true;
14272}
14273
4697a36c
MM
14274\f
14275/* Calculate the stack information for the current function. This is
14276 complicated by having two separate calling sequences, the AIX calling
14277 sequence and the V.4 calling sequence.
14278
592696dd 14279 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 14280 32-bit 64-bit
4697a36c 14281 SP----> +---------------------------------------+
a260abc9 14282 | back chain to caller | 0 0
4697a36c 14283 +---------------------------------------+
a260abc9 14284 | saved CR | 4 8 (8-11)
4697a36c 14285 +---------------------------------------+
a260abc9 14286 | saved LR | 8 16
4697a36c 14287 +---------------------------------------+
a260abc9 14288 | reserved for compilers | 12 24
4697a36c 14289 +---------------------------------------+
a260abc9 14290 | reserved for binders | 16 32
4697a36c 14291 +---------------------------------------+
a260abc9 14292 | saved TOC pointer | 20 40
4697a36c 14293 +---------------------------------------+
a260abc9 14294 | Parameter save area (P) | 24 48
4697a36c 14295 +---------------------------------------+
a260abc9 14296 | Alloca space (A) | 24+P etc.
802a0058 14297 +---------------------------------------+
a7df97e6 14298 | Local variable space (L) | 24+P+A
4697a36c 14299 +---------------------------------------+
a7df97e6 14300 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 14301 +---------------------------------------+
00b960c7
AH
14302 | Save area for AltiVec registers (W) | 24+P+A+L+X
14303 +---------------------------------------+
14304 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
14305 +---------------------------------------+
14306 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 14307 +---------------------------------------+
00b960c7
AH
 14308	 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
 14309	 +---------------------------------------+
 14310	 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
4697a36c
MM
14311 +---------------------------------------+
14312 old SP->| back chain to caller's caller |
14313 +---------------------------------------+
14314
5376a30c
KR
14315 The required alignment for AIX configurations is two words (i.e., 8
14316 or 16 bytes).
14317
14318
4697a36c
MM
14319 V.4 stack frames look like:
14320
14321 SP----> +---------------------------------------+
14322 | back chain to caller | 0
14323 +---------------------------------------+
5eb387b8 14324 | caller's saved LR | 4
4697a36c
MM
14325 +---------------------------------------+
14326 | Parameter save area (P) | 8
14327 +---------------------------------------+
a7df97e6 14328 | Alloca space (A) | 8+P
f676971a 14329 +---------------------------------------+
a7df97e6 14330 | Varargs save area (V) | 8+P+A
f676971a 14331 +---------------------------------------+
a7df97e6 14332 | Local variable space (L) | 8+P+A+V
f676971a 14333 +---------------------------------------+
a7df97e6 14334 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 14335 +---------------------------------------+
00b960c7
AH
14336 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
14337 +---------------------------------------+
14338 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
14339 +---------------------------------------+
14340 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
14341 +---------------------------------------+
c4ad648e
AM
14342 | SPE: area for 64-bit GP registers |
14343 +---------------------------------------+
14344 | SPE alignment padding |
14345 +---------------------------------------+
00b960c7 14346 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 14347 +---------------------------------------+
00b960c7 14348 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 14349 +---------------------------------------+
00b960c7 14350 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
14351 +---------------------------------------+
14352 old SP->| back chain to caller's caller |
14353 +---------------------------------------+
b6c9286a 14354
5376a30c
KR
14355 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
14356 given. (But note below and in sysv4.h that we require only 8 and
14357 may round up the size of our stack frame anyways. The historical
14358 reason is early versions of powerpc-linux which didn't properly
14359 align the stack at program startup. A happy side-effect is that
14360 -mno-eabi libraries can be used with -meabi programs.)
14361
50d440bc 14362 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
14363 the stack alignment requirements may differ. If -mno-eabi is not
14364 given, the required stack alignment is 8 bytes; if -mno-eabi is
14365 given, the required alignment is 16 bytes. (But see V.4 comment
14366 above.) */
4697a36c 14367
61b2fbe7
MM
14368#ifndef ABI_STACK_BOUNDARY
14369#define ABI_STACK_BOUNDARY STACK_BOUNDARY
14370#endif
14371
d1d0c603 14372static rs6000_stack_t *
863d938c 14373rs6000_stack_info (void)
4697a36c 14374{
022123e6 14375 static rs6000_stack_t info;
4697a36c 14376 rs6000_stack_t *info_ptr = &info;
327e5343 14377 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 14378 int ehrd_size;
64045029 14379 int save_align;
8070c91a 14380 int first_gp;
44688022 14381 HOST_WIDE_INT non_fixed_size;
4697a36c 14382
022123e6 14383 memset (&info, 0, sizeof (info));
4697a36c 14384
c19de7aa
AH
14385 if (TARGET_SPE)
14386 {
14387 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 14388 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
14389 cfun->machine->insn_chain_scanned_p
14390 = spe_func_has_64bit_regs_p () + 1;
14391 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
14392 }
14393
a4f6c312 14394 /* Select which calling sequence. */
178274da 14395 info_ptr->abi = DEFAULT_ABI;
9878760c 14396
a4f6c312 14397 /* Calculate which registers need to be saved & save area size. */
4697a36c 14398 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 14399 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
14400 even if it currently looks like we won't. Reload may need it to
14401 get at a constant; if so, it will have already created a constant
14402 pool entry for it. */
2bfcf297 14403 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
14404 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14405 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
8070c91a 14406 && current_function_uses_const_pool
1db02437 14407 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 14408 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 14409 else
8070c91a
DJ
14410 first_gp = info_ptr->first_gp_reg_save;
14411
14412 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 14413
a3170dc6
AH
14414 /* For the SPE, we have an additional upper 32-bits on each GPR.
14415 Ideally we should save the entire 64-bits only when the upper
14416 half is used in SIMD instructions. Since we only record
14417 registers live (not the size they are used in), this proves
14418 difficult because we'd have to traverse the instruction chain at
14419 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
 14420	     so we opt to always save the GPRs in 64-bits if even one register
 14421	     gets used in 64-bits.  Otherwise, all the registers in the frame
14422 get saved in 32-bits.
a3170dc6 14423
c19de7aa 14424 So... since when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 14425 traditional GP save area will be empty. */
c19de7aa 14426 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14427 info_ptr->gp_size = 0;
14428
4697a36c
MM
14429 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14430 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14431
00b960c7
AH
14432 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14433 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14434 - info_ptr->first_altivec_reg_save);
14435
592696dd 14436 /* Does this function call anything? */
71f123ca
FS
14437 info_ptr->calls_p = (! current_function_is_leaf
14438 || cfun->machine->ra_needs_full_frame);
b6c9286a 14439
a4f6c312 14440 /* Determine if we need to save the link register. */
022123e6
AM
14441 if ((DEFAULT_ABI == ABI_AIX
14442 && current_function_profile
14443 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
14444#ifdef TARGET_RELOCATABLE
14445 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14446#endif
14447 || (info_ptr->first_fp_reg_save != 64
14448 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
00b960c7 14449 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
178274da 14450 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
022123e6
AM
14451 || info_ptr->calls_p
14452 || rs6000_ra_ever_killed ())
4697a36c
MM
14453 {
14454 info_ptr->lr_save_p = 1;
1de43f85 14455 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
14456 }
14457
9ebbca7d 14458 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
14459 if (df_regs_ever_live_p (CR2_REGNO)
14460 || df_regs_ever_live_p (CR3_REGNO)
14461 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
14462 {
14463 info_ptr->cr_save_p = 1;
178274da 14464 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
14465 info_ptr->cr_size = reg_size;
14466 }
14467
83720594
RH
14468 /* If the current function calls __builtin_eh_return, then we need
14469 to allocate stack space for registers that will hold data for
14470 the exception handler. */
14471 if (current_function_calls_eh_return)
14472 {
14473 unsigned int i;
14474 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14475 continue;
a3170dc6
AH
14476
14477 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14478 ehrd_size = i * (TARGET_SPE_ABI
14479 && info_ptr->spe_64bit_regs_used != 0
14480 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14481 }
14482 else
14483 ehrd_size = 0;
14484
592696dd 14485 /* Determine various sizes. */
4697a36c
MM
14486 info_ptr->reg_size = reg_size;
14487 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14488 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
a4f6c312 14489 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
03e007d7 14490 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14491 if (FRAME_GROWS_DOWNWARD)
14492 info_ptr->vars_size
5b667039
JJ
14493 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14494 + info_ptr->parm_size,
7d5175e1 14495 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14496 - (info_ptr->fixed_size + info_ptr->vars_size
14497 + info_ptr->parm_size);
00b960c7 14498
c19de7aa 14499 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 14500 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
14501 else
14502 info_ptr->spe_gp_size = 0;
14503
4d774ff8
HP
14504 if (TARGET_ALTIVEC_ABI)
14505 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14506 else
4d774ff8
HP
14507 info_ptr->vrsave_mask = 0;
14508
14509 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14510 info_ptr->vrsave_size = 4;
14511 else
14512 info_ptr->vrsave_size = 0;
b6c9286a 14513
d62294f5
FJ
14514 compute_save_world_info (info_ptr);
14515
592696dd 14516 /* Calculate the offsets. */
178274da 14517 switch (DEFAULT_ABI)
4697a36c 14518 {
b6c9286a 14519 case ABI_NONE:
24d304eb 14520 default:
37409796 14521 gcc_unreachable ();
b6c9286a
MM
14522
14523 case ABI_AIX:
ee890fe2 14524 case ABI_DARWIN:
b6c9286a
MM
14525 info_ptr->fp_save_offset = - info_ptr->fp_size;
14526 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14527
14528 if (TARGET_ALTIVEC_ABI)
14529 {
14530 info_ptr->vrsave_save_offset
14531 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14532
982afe02 14533 /* Align stack so vector save area is on a quadword boundary.
9278121c 14534 The padding goes above the vectors. */
00b960c7
AH
14535 if (info_ptr->altivec_size != 0)
14536 info_ptr->altivec_padding_size
9278121c 14537 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14538 else
14539 info_ptr->altivec_padding_size = 0;
14540
14541 info_ptr->altivec_save_offset
14542 = info_ptr->vrsave_save_offset
14543 - info_ptr->altivec_padding_size
14544 - info_ptr->altivec_size;
9278121c
GK
14545 gcc_assert (info_ptr->altivec_size == 0
14546 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
14547
14548 /* Adjust for AltiVec case. */
14549 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14550 }
14551 else
14552 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14553 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14554 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
14555 break;
14556
14557 case ABI_V4:
b6c9286a
MM
14558 info_ptr->fp_save_offset = - info_ptr->fp_size;
14559 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14560 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14561
c19de7aa 14562 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14563 {
14564 /* Align stack so SPE GPR save area is aligned on a
14565 double-word boundary. */
14566 if (info_ptr->spe_gp_size != 0)
14567 info_ptr->spe_padding_size
14568 = 8 - (-info_ptr->cr_save_offset % 8);
14569 else
14570 info_ptr->spe_padding_size = 0;
14571
14572 info_ptr->spe_gp_save_offset
14573 = info_ptr->cr_save_offset
14574 - info_ptr->spe_padding_size
14575 - info_ptr->spe_gp_size;
14576
14577 /* Adjust for SPE case. */
022123e6 14578 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14579 }
a3170dc6 14580 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14581 {
14582 info_ptr->vrsave_save_offset
14583 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14584
14585 /* Align stack so vector save area is on a quadword boundary. */
14586 if (info_ptr->altivec_size != 0)
14587 info_ptr->altivec_padding_size
14588 = 16 - (-info_ptr->vrsave_save_offset % 16);
14589 else
14590 info_ptr->altivec_padding_size = 0;
14591
14592 info_ptr->altivec_save_offset
14593 = info_ptr->vrsave_save_offset
14594 - info_ptr->altivec_padding_size
14595 - info_ptr->altivec_size;
14596
14597 /* Adjust for AltiVec case. */
022123e6 14598 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14599 }
14600 else
022123e6
AM
14601 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14602 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14603 info_ptr->lr_save_offset = reg_size;
14604 break;
4697a36c
MM
14605 }
14606
64045029 14607 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14608 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14609 + info_ptr->gp_size
14610 + info_ptr->altivec_size
14611 + info_ptr->altivec_padding_size
a3170dc6
AH
14612 + info_ptr->spe_gp_size
14613 + info_ptr->spe_padding_size
00b960c7
AH
14614 + ehrd_size
14615 + info_ptr->cr_size
022123e6 14616 + info_ptr->vrsave_size,
64045029 14617 save_align);
00b960c7 14618
44688022 14619 non_fixed_size = (info_ptr->vars_size
ff381587 14620 + info_ptr->parm_size
5b667039 14621 + info_ptr->save_size);
ff381587 14622
44688022
AM
14623 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14624 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14625
14626 /* Determine if we need to allocate any stack frame:
14627
a4f6c312
SS
14628 For AIX we need to push the stack if a frame pointer is needed
14629 (because the stack might be dynamically adjusted), if we are
14630 debugging, if we make calls, or if the sum of fp_save, gp_save,
14631 and local variables are more than the space needed to save all
14632 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14633 + 18*8 = 288 (GPR13 reserved).
ff381587 14634
a4f6c312
SS
14635 For V.4 we don't have the stack cushion that AIX uses, but assume
14636 that the debugger can handle stackless frames. */
ff381587
MM
14637
14638 if (info_ptr->calls_p)
14639 info_ptr->push_p = 1;
14640
178274da 14641 else if (DEFAULT_ABI == ABI_V4)
44688022 14642 info_ptr->push_p = non_fixed_size != 0;
ff381587 14643
178274da
AM
14644 else if (frame_pointer_needed)
14645 info_ptr->push_p = 1;
14646
14647 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14648 info_ptr->push_p = 1;
14649
ff381587 14650 else
44688022 14651 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 14652
a4f6c312 14653 /* Zero offsets if we're not saving those registers. */
8dda1a21 14654 if (info_ptr->fp_size == 0)
4697a36c
MM
14655 info_ptr->fp_save_offset = 0;
14656
8dda1a21 14657 if (info_ptr->gp_size == 0)
4697a36c
MM
14658 info_ptr->gp_save_offset = 0;
14659
00b960c7
AH
14660 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14661 info_ptr->altivec_save_offset = 0;
14662
14663 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14664 info_ptr->vrsave_save_offset = 0;
14665
c19de7aa
AH
14666 if (! TARGET_SPE_ABI
14667 || info_ptr->spe_64bit_regs_used == 0
14668 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
14669 info_ptr->spe_gp_save_offset = 0;
14670
c81fc13e 14671 if (! info_ptr->lr_save_p)
4697a36c
MM
14672 info_ptr->lr_save_offset = 0;
14673
c81fc13e 14674 if (! info_ptr->cr_save_p)
4697a36c
MM
14675 info_ptr->cr_save_offset = 0;
14676
14677 return info_ptr;
14678}
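/* Illustrative worked example (not part of the original source; the
   function and register choices are hypothetical): for a 32-bit AIX-ABI
   function that saves only r30, r31, f31, and the link register, the code
   above yields

       reg_size        = 4
       fp_size         = 8 * (64 - 63) = 8   ->  fp_save_offset = -8
       gp_size         = 4 * (32 - 30) = 8   ->  gp_save_offset = -16
       cr_save_offset  = 4
       lr_save_offset  = 8

   with save_size rounded up to save_align (8 or 16) and total_size rounded
   up to ABI_STACK_BOUNDARY.  */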
14679
c19de7aa
AH
14680/* Return true if the current function uses any GPRs in 64-bit SIMD
14681 mode. */
14682
14683static bool
863d938c 14684spe_func_has_64bit_regs_p (void)
c19de7aa
AH
14685{
14686 rtx insns, insn;
14687
14688 /* Functions that save and restore all the call-saved registers will
14689 need to save/restore the registers in 64-bits. */
14690 if (current_function_calls_eh_return
14691 || current_function_calls_setjmp
14692 || current_function_has_nonlocal_goto)
14693 return true;
14694
14695 insns = get_insns ();
14696
14697 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
14698 {
14699 if (INSN_P (insn))
14700 {
14701 rtx i;
14702
b5a5beb9
AH
14703 /* FIXME: This should be implemented with attributes...
14704
14705 (set_attr "spe64" "true")....then,
14706 if (get_spe64(insn)) return true;
14707
14708 It's the only reliable way to do the stuff below. */
14709
c19de7aa 14710 i = PATTERN (insn);
f82f556d
AH
14711 if (GET_CODE (i) == SET)
14712 {
14713 enum machine_mode mode = GET_MODE (SET_SRC (i));
14714
14715 if (SPE_VECTOR_MODE (mode))
14716 return true;
4d4447b5
PB
14717 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
14718 || mode == DDmode || mode == TDmode))
f82f556d
AH
14719 return true;
14720 }
c19de7aa
AH
14721 }
14722 }
14723
14724 return false;
14725}
14726
d1d0c603 14727static void
a2369ed3 14728debug_stack_info (rs6000_stack_t *info)
9878760c 14729{
d330fd93 14730 const char *abi_string;
24d304eb 14731
c81fc13e 14732 if (! info)
4697a36c
MM
14733 info = rs6000_stack_info ();
14734
14735 fprintf (stderr, "\nStack information for function %s:\n",
14736 ((current_function_decl && DECL_NAME (current_function_decl))
14737 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
14738 : "<unknown>"));
14739
24d304eb
RK
14740 switch (info->abi)
14741 {
b6c9286a
MM
14742 default: abi_string = "Unknown"; break;
14743 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 14744 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 14745 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 14746 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
14747 }
14748
14749 fprintf (stderr, "\tABI = %5s\n", abi_string);
14750
00b960c7
AH
14751 if (TARGET_ALTIVEC_ABI)
14752 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
14753
a3170dc6
AH
14754 if (TARGET_SPE_ABI)
14755 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
14756
4697a36c
MM
14757 if (info->first_gp_reg_save != 32)
14758 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
14759
14760 if (info->first_fp_reg_save != 64)
14761 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 14762
00b960c7
AH
14763 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
14764 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
14765 info->first_altivec_reg_save);
14766
4697a36c
MM
14767 if (info->lr_save_p)
14768 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 14769
4697a36c
MM
14770 if (info->cr_save_p)
14771 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
14772
00b960c7
AH
14773 if (info->vrsave_mask)
14774 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
14775
4697a36c
MM
14776 if (info->push_p)
14777 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
14778
14779 if (info->calls_p)
14780 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
14781
4697a36c
MM
14782 if (info->gp_save_offset)
14783 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
14784
14785 if (info->fp_save_offset)
14786 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
14787
00b960c7
AH
14788 if (info->altivec_save_offset)
14789 fprintf (stderr, "\taltivec_save_offset = %5d\n",
14790 info->altivec_save_offset);
14791
a3170dc6
AH
14792 if (info->spe_gp_save_offset)
14793 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
14794 info->spe_gp_save_offset);
14795
00b960c7
AH
14796 if (info->vrsave_save_offset)
14797 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
14798 info->vrsave_save_offset);
14799
4697a36c
MM
14800 if (info->lr_save_offset)
14801 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
14802
14803 if (info->cr_save_offset)
14804 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
14805
14806 if (info->varargs_save_offset)
14807 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
14808
14809 if (info->total_size)
d1d0c603
JJ
14810 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14811 info->total_size);
4697a36c 14812
4697a36c 14813 if (info->vars_size)
d1d0c603
JJ
14814 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14815 info->vars_size);
4697a36c
MM
14816
14817 if (info->parm_size)
14818 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
14819
14820 if (info->fixed_size)
14821 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
14822
14823 if (info->gp_size)
14824 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
14825
a3170dc6
AH
14826 if (info->spe_gp_size)
14827 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
14828
4697a36c
MM
14829 if (info->fp_size)
14830 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
14831
00b960c7
AH
14832 if (info->altivec_size)
14833 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
14834
14835 if (info->vrsave_size)
14836 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
14837
14838 if (info->altivec_padding_size)
14839 fprintf (stderr, "\taltivec_padding_size= %5d\n",
14840 info->altivec_padding_size);
14841
a3170dc6
AH
14842 if (info->spe_padding_size)
14843 fprintf (stderr, "\tspe_padding_size = %5d\n",
14844 info->spe_padding_size);
14845
4697a36c
MM
14846 if (info->cr_size)
14847 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
14848
14849 if (info->save_size)
14850 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
14851
14852 if (info->reg_size != 4)
14853 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
14854
14855 fprintf (stderr, "\n");
9878760c 14856}
71f123ca
FS
14857
14858rtx
a2369ed3 14859rs6000_return_addr (int count, rtx frame)
71f123ca 14860{
a4f6c312
SS
14861 /* Currently we don't optimize very well between prolog and body
 14862	     code, and for PIC code the code can actually be quite bad, so
14863 don't try to be too clever here. */
f1384257 14864 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
14865 {
14866 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
14867
14868 return
14869 gen_rtx_MEM
14870 (Pmode,
14871 memory_address
14872 (Pmode,
14873 plus_constant (copy_to_reg
14874 (gen_rtx_MEM (Pmode,
14875 memory_address (Pmode, frame))),
14876 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
14877 }
14878
8c29550d 14879 cfun->machine->ra_need_lr = 1;
1de43f85 14880 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
14881}
14882
5e1bf043
DJ
14883/* Say whether a function is a candidate for sibcall handling or not.
14884 We do not allow indirect calls to be optimized into sibling calls.
14885 Also, we can't do it if there are any vector parameters; there's
 14886	   nowhere to put the VRsave code where it would work; note that functions with
14887 vector parameters are required to have a prototype, so the argument
14888 type info must be available here. (The tail recursion case can work
14889 with vector parameters, but there's no way to distinguish here.) */
4977bab6 14890static bool
a2369ed3 14891rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
14892{
14893 tree type;
4977bab6 14894 if (decl)
5e1bf043
DJ
14895 {
14896 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 14897 {
4977bab6 14898 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
14899 type; type = TREE_CHAIN (type))
14900 {
c15b529f 14901 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 14902 return false;
5e1bf043 14903 }
c4ad648e 14904 }
5e1bf043 14905 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
14906 || ((*targetm.binds_local_p) (decl)
14907 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 14908 {
4977bab6 14909 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
14910
14911 if (!lookup_attribute ("longcall", attr_list)
14912 || lookup_attribute ("shortcall", attr_list))
4977bab6 14913 return true;
2bcc50d0 14914 }
5e1bf043 14915 }
4977bab6 14916 return false;
5e1bf043
DJ
14917}
14918
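/* Illustration of the conditions above (example functions are made up):

       extern int helper (int);
       int wrapper (int x) { return helper (x + 1); }

   wrapper's call can become a sibling call when helper binds locally (or
   on Darwin), has no "longcall" attribute, and takes no vector
   arguments; any AltiVec vector parameter defeats it because there is no
   place to emit the VRsave fixup.  */
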
/* Return NULL if INSN is valid within a low-overhead loop.
   Otherwise return a string explaining why doloop cannot be applied.

   PowerPC uses the COUNT register for branch on table instructions.  */

static const char *
rs6000_invalid_within_doloop (const_rtx insn)
{
  if (CALL_P (insn))
    return "Function call in the loop.";

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_VEC))
    return "Computed branch in the loop.";

  return NULL;
}

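/* Illustration (hypothetical loop): a counted loop whose body contains a
   call, such as

       for (i = 0; i < n; i++)
	 a[i] = f (a[i]);

   is rejected for the CTR-based low-overhead form, since the call may
   itself need the COUNT register; a switch table (ADDR_VEC or
   ADDR_DIFF_VEC jump) inside the body is rejected for the same
   reason.  */
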
static int
rs6000_ra_ever_killed (void)
{
  rtx top;
  rtx reg;
  rtx insn;

  if (current_function_is_thunk)
    return 0;

  /* regs_ever_live has LR marked as used if any sibcalls are present,
     but this should not force saving and restoring in the
     pro/epilogue.  Likewise, reg_set_between_p thinks a sibcall
     clobbers LR, so that is inappropriate.  */

  /* Also, the prologue can generate a store into LR that
     doesn't really count, like this:

	move LR->R0
	bcl to set PIC register
	move LR->R31
	move R0->LR

     When we're called from the epilogue, we need to avoid counting
     this as a store.  */

  push_topmost_sequence ();
  top = get_insns ();
  pop_topmost_sequence ();
  reg = gen_rtx_REG (Pmode, LR_REGNO);

  for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  if (CALL_P (insn))
	    {
	      if (!SIBLING_CALL_P (insn))
		return 1;
	    }
	  else if (find_regno_note (insn, REG_INC, LR_REGNO))
	    return 1;
	  else if (set_of (reg, insn) != NULL_RTX
		   && !prologue_epilogue_contains (insn))
	    return 1;
	}
    }
  return 0;
}

/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.  */

void
rs6000_emit_load_toc_table (int fromprolog)
{
  rtx dest;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
    {
      char buf[30];
      rtx lab, tmp1, tmp2, got;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
      lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      if (flag_pic == 2)
	got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
      else
	got = rs6000_got_sym ();
      tmp1 = tmp2 = dest;
      if (!fromprolog)
	{
	  tmp1 = gen_reg_rtx (Pmode);
	  tmp2 = gen_reg_rtx (Pmode);
	}
      emit_insn (gen_load_toc_v4_PIC_1 (lab));
      emit_move_insn (tmp1,
		      gen_rtx_REG (Pmode, LR_REGNO));
      emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
      emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
    }
  else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      emit_insn (gen_load_toc_v4_pic_si ());
      emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
    }
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));

      if (fromprolog)
	{
	  rtx symF, symL;

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  emit_insn (gen_load_toc_v4_PIC_1 (symF));
	  emit_move_insn (dest,
			  gen_rtx_REG (Pmode, LR_REGNO));
	  emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
	}
      else
	{
	  rtx tocsym;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
	  emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
	  emit_move_insn (dest,
			  gen_rtx_REG (Pmode, LR_REGNO));
	  emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
	}
      emit_insn (gen_addsi3 (dest, temp0, dest));
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      emit_insn (gen_elf_high (dest, realsym));
      emit_insn (gen_elf_low (dest, dest, realsym));
    }
  else
    {
      gcc_assert (DEFAULT_ABI == ABI_AIX);

      if (TARGET_32BIT)
	emit_insn (gen_load_toc_aix_si (dest));
      else
	emit_insn (gen_load_toc_aix_di (dest));
    }
}

/* Emit instructions to restore the link register after determining where
   its value has been stored.  */

void
rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
{
  rs6000_stack_t *info = rs6000_stack_info ();
  rtx operands[2];

  operands[0] = source;
  operands[1] = scratch;

  if (info->lr_save_p)
    {
      rtx frame_rtx = stack_pointer_rtx;
      HOST_WIDE_INT sp_offset = 0;
      rtx tmp;

      if (frame_pointer_needed
	  || current_function_calls_alloca
	  || info->total_size > 32767)
	{
	  tmp = gen_frame_mem (Pmode, frame_rtx);
	  emit_move_insn (operands[1], tmp);
	  frame_rtx = operands[1];
	}
      else if (info->push_p)
	sp_offset = info->total_size;

      tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
      tmp = gen_frame_mem (Pmode, tmp);
      emit_move_insn (tmp, operands[0]);
    }
  else
    emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
}

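/* Illustrative outcome (offsets are only representative; the real ones
   come from rs6000_stack_info): when lr_save_p is set, SOURCE ends up in
   the link register save slot, roughly

       std  rSRC, 16(r1)       (64-bit ABI LR slot)

   possibly after reloading the frame base through SCRATCH for large or
   alloca-using frames; otherwise SOURCE is moved straight into LR.  */
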
static GTY(()) alias_set_type set = -1;

alias_set_type
get_TOC_alias_set (void)
{
  if (set == -1)
    set = new_alias_set ();
  return set;
}

/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (use (unspec ... UNSPEC_TOC)), which
   is generated by the ABI_V4 load_toc_* patterns.  */
#if TARGET_ELF
static int
uses_TOC (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	rtx pat = PATTERN (insn);
	int i;

	if (GET_CODE (pat) == PARALLEL)
	  for (i = 0; i < XVECLEN (pat, 0); i++)
	    {
	      rtx sub = XVECEXP (pat, 0, i);
	      if (GET_CODE (sub) == USE)
		{
		  sub = XEXP (sub, 0);
		  if (GET_CODE (sub) == UNSPEC
		      && XINT (sub, 1) == UNSPEC_TOC)
		    return 1;
		}
	    }
      }
  return 0;
}
#endif

rtx
create_TOC_reference (rtx symbol)
{
  if (!can_create_pseudo_p ())
    df_set_regs_ever_live (TOC_REGISTER, true);
  return gen_rtx_PLUS (Pmode,
	   gen_rtx_REG (Pmode, TOC_REGISTER),
	     gen_rtx_CONST (Pmode,
	       gen_rtx_MINUS (Pmode, symbol,
		 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
}
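
/* The rtx built by create_TOC_reference has the shape (sketch):

       (plus:P (reg:P TOC_REGISTER)
	       (const:P (minus:P (symbol_ref "sym")
				 (symbol_ref toc_label_name))))

   i.e. an address expressed relative to the TOC base label, which the
   output machinery later prints as a TOC-register-relative operand.
   "sym" stands for whatever SYMBOL was passed in.  */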

/* If _Unwind_* has been called from within the same module,
   the TOC register is not guaranteed to be saved to 40(1) on function
   entry.  Save it there in that case.  */

void
rs6000_aix_emit_builtin_unwind_init (void)
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx no_toc_save_needed = gen_label_rtx ();

  mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  mem = gen_frame_mem (Pmode,
		       gen_rtx_PLUS (Pmode, stack_top,
				     GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_save_needed);

  mem = gen_frame_mem (Pmode,
		       gen_rtx_PLUS (Pmode, stack_top,
				     GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
  emit_label (no_toc_save_needed);
}

/* This ties together stack memory (MEM with an alias set of frame_alias_set)
   and the change to the stack pointer.  */

static void
rs6000_emit_stack_tie (void)
{
  rtx mem = gen_frame_mem (BLKmode,
			   gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));

  emit_insn (gen_stack_tie (mem));
}

/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.  */

static void
rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = gen_int_mode (-size, Pmode);

  if (INTVAL (todec) != -size)
    {
      warning (0, "stack frame too large");
      emit_insn (gen_trap ());
      return;
    }

  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning (0, "stack limit expression is not supported");
    }

  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      if (size > 32767)
	{
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn () == NULL_RTX)
	    emit_note (NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      insn = emit_insn (TARGET_32BIT
			? gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg)
			: gen_movdi_di_update (stack_reg, stack_reg,
					       todec, stack_reg));
    }
  else
    {
      insn = emit_insn (TARGET_32BIT
			? gen_addsi3 (stack_reg, stack_reg, todec)
			: gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}

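/* For example (instruction sequence shown only as a sketch): compiling
   with -fstack-limit-register=r14 makes stack_limit_rtx the hard
   register r14, so the register branch above emits roughly

       add   r0, r14, SIZE
       twllt r1, r0            (trap if the new SP would fall below the limit)

   while -fstack-limit-symbol=NAME on 32-bit V.4 goes through the
   elf_high/elf_low pair to form limit+SIZE before the same conditional
   trap.  */
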
/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
   deduce these equivalences by itself so it wasn't necessary to hold
   its hand so much.  */

static void
rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
		      rtx reg2, rtx rreg)
{
  rtx real, temp;

  /* copy_rtx will not make unique copies of registers, so we need to
     ensure we don't have unwanted sharing here.  */
  if (reg == reg2)
    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));

  if (reg == rreg)
    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));

  real = copy_rtx (PATTERN (insn));

  if (reg2 != NULL_RTX)
    real = replace_rtx (real, reg2, rreg);

  real = replace_rtx (real, reg,
		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
							STACK_POINTER_REGNUM),
				    GEN_INT (val)));

  /* We expect that 'real' is either a SET or a PARALLEL containing
     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
     are important so they all have to be marked RTX_FRAME_RELATED_P.  */

  if (GET_CODE (real) == SET)
    {
      rtx set = real;

      temp = simplify_rtx (SET_SRC (set));
      if (temp)
	SET_SRC (set) = temp;
      temp = simplify_rtx (SET_DEST (set));
      if (temp)
	SET_DEST (set) = temp;
      if (GET_CODE (SET_DEST (set)) == MEM)
	{
	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
	  if (temp)
	    XEXP (SET_DEST (set), 0) = temp;
	}
    }
  else
    {
      int i;

      gcc_assert (GET_CODE (real) == PARALLEL);
      for (i = 0; i < XVECLEN (real, 0); i++)
	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
	  {
	    rtx set = XVECEXP (real, 0, i);

	    temp = simplify_rtx (SET_SRC (set));
	    if (temp)
	      SET_SRC (set) = temp;
	    temp = simplify_rtx (SET_DEST (set));
	    if (temp)
	      SET_DEST (set) = temp;
	    if (GET_CODE (SET_DEST (set)) == MEM)
	      {
		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
		if (temp)
		  XEXP (SET_DEST (set), 0) = temp;
	      }
	    RTX_FRAME_RELATED_P (set) = 1;
	  }
    }

  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
					real,
					REG_NOTES (insn));
}

/* Returns an insn that has a vrsave set operation with the
   appropriate CLOBBERs.  */

static rtx
generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
{
  int nclobs, i;
  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);

  clobs[0]
    = gen_rtx_SET (VOIDmode,
		   vrsave,
		   gen_rtx_UNSPEC_VOLATILE (SImode,
					    gen_rtvec (2, reg, vrsave),
					    UNSPECV_SET_VRSAVE));

  nclobs = 1;

  /* We need to clobber the registers in the mask so the scheduler
     does not move sets to VRSAVE before sets of AltiVec registers.

     However, if the function receives nonlocal gotos, reload will set
     all call saved registers live.  We will end up with:

	(set (reg 999) (mem))
	(parallel [ (set (reg vrsave) (unspec blah))
		    (clobber (reg 999))])

     The clobber will cause the store into reg 999 to be dead, and
     flow will attempt to delete an epilogue insn.  In this case, we
     need an unspec use/set of the register.  */

  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
      {
	if (!epiloguep || call_used_regs [i])
	  clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
					     gen_rtx_REG (V4SImode, i));
	else
	  {
	    rtx reg = gen_rtx_REG (V4SImode, i);

	    clobs[nclobs++]
	      = gen_rtx_SET (VOIDmode,
			     reg,
			     gen_rtx_UNSPEC (V4SImode,
					     gen_rtvec (1, reg), 27));
	  }
      }

  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));

  for (i = 0; i < nclobs; ++i)
    XVECEXP (insn, 0, i) = clobs[i];

  return insn;
}

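/* The PARALLEL returned above looks like (register numbers and the
   exact list of clobbers are illustrative):

       (parallel [(set (reg:SI VRSAVE_REGNO)
		       (unspec_volatile:SI [(reg) (reg:SI VRSAVE_REGNO)]
					   UNSPECV_SET_VRSAVE))
		  (clobber (reg:V4SI v20))
		  ...])

   with one clobber (or, in the epilogue, a dummy unspec set) per AltiVec
   register named in info->vrsave_mask, purely to keep the scheduler from
   reordering the VRSAVE update with the vector saves and restores.  */
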
/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
   Save REGNO into [FRAME_REG + OFFSET] in mode MODE.  */

static void
emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
		 unsigned int regno, int offset, HOST_WIDE_INT total_size)
{
  rtx reg, offset_rtx, insn, mem, addr, int_rtx;
  rtx replacea, replaceb;

  int_rtx = GEN_INT (offset);

  /* Some cases that need register indexed addressing.  */
  if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
      || (TARGET_E500_DOUBLE && (mode == DFmode || mode == DDmode))
      || (TARGET_SPE_ABI
	  && SPE_VECTOR_MODE (mode)
	  && !SPE_CONST_OFFSET_OK (offset)))
    {
      /* Whoever calls us must make sure r11 is available in the
	 flow path of instructions in the prologue.  */
      offset_rtx = gen_rtx_REG (Pmode, 11);
      emit_move_insn (offset_rtx, int_rtx);

      replacea = offset_rtx;
      replaceb = int_rtx;
    }
  else
    {
      offset_rtx = int_rtx;
      replacea = NULL_RTX;
      replaceb = NULL_RTX;
    }

  reg = gen_rtx_REG (mode, regno);
  addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
  mem = gen_frame_mem (mode, addr);

  insn = emit_move_insn (mem, reg);

  rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
}

/* Emit an offset memory reference suitable for a frame store, while
   converting to a valid addressing mode.  */

static rtx
gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
{
  rtx int_rtx, offset_rtx;

  int_rtx = GEN_INT (offset);

  if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
      || (TARGET_E500_DOUBLE && (mode == DFmode || mode == DDmode)))
    {
      offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
      emit_move_insn (offset_rtx, int_rtx);
    }
  else
    offset_rtx = int_rtx;

  return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
}

/* Look for user-defined global regs.  We should not save and restore these,
   and cannot use stmw/lmw if there are any in its range.  */

static bool
no_global_regs_above (int first_greg)
{
  int i;
  for (i = 0; i < 32 - first_greg; i++)
    if (global_regs[first_greg + i])
      return false;
  return true;
}

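/* For example, a translation unit that declares the GCC extension

       register int reserved_counter asm ("r30");

   marks global_regs[30], so no_global_regs_above (first_gp_reg_save)
   returns false whenever r30 is at or above the first saved GPR, and the
   prologue/epilogue fall back to saving registers individually instead
   of using stmw/lmw.  (The variable name is made up; any GPR in the save
   range has the same effect.)  */
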
#ifndef TARGET_FIX_AND_CONTINUE
#define TARGET_FIX_AND_CONTINUE 0
#endif

/* Determine whether the gp REG is really used.  */

static bool
rs6000_reg_live_or_pic_offset_p (int reg)
{
  return ((df_regs_ever_live_p (reg)
	   && (!call_used_regs[reg]
	       || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
		   && TARGET_TOC && TARGET_MINIMAL_TOC)))
	  || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
}

9ebbca7d
GK
15551/* Emit function prologue as insns. */
15552
9878760c 15553void
863d938c 15554rs6000_emit_prologue (void)
9878760c 15555{
4697a36c 15556 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 15557 enum machine_mode reg_mode = Pmode;
327e5343 15558 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
15559 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15560 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
15561 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 15562 rtx cr_save_rtx = NULL_RTX;
9ebbca7d
GK
15563 rtx insn;
15564 int saving_FPRs_inline;
15565 int using_store_multiple;
15566 HOST_WIDE_INT sp_offset = 0;
f676971a 15567
699c914a
MS
15568 if (TARGET_FIX_AND_CONTINUE)
15569 {
15570 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 15571 address by modifying the first 5 instructions of the function
699c914a
MS
15572 to branch to the overriding function. This is necessary to
15573 permit function pointers that point to the old function to
15574 actually forward to the new function. */
15575 emit_insn (gen_nop ());
15576 emit_insn (gen_nop ());
de2ab0ca 15577 emit_insn (gen_nop ());
699c914a
MS
15578 emit_insn (gen_nop ());
15579 emit_insn (gen_nop ());
15580 }
15581
15582 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
15583 {
15584 reg_mode = V2SImode;
15585 reg_size = 8;
15586 }
a3170dc6 15587
9ebbca7d 15588 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
15589 && (!TARGET_SPE_ABI
15590 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
15591 && info->first_gp_reg_save < 31
15592 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 15593 saving_FPRs_inline = (info->first_fp_reg_save == 64
8c29550d 15594 || FP_SAVE_INLINE (info->first_fp_reg_save)
acd0b319 15595 || current_function_calls_eh_return
8c29550d 15596 || cfun->machine->ra_need_lr);
9ebbca7d
GK
15597
15598 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
15599 if (! WORLD_SAVE_P (info)
15600 && info->push_p
acd0b319
AM
15601 && (DEFAULT_ABI == ABI_V4
15602 || current_function_calls_eh_return))
9ebbca7d
GK
15603 {
15604 if (info->total_size < 32767)
15605 sp_offset = info->total_size;
15606 else
15607 frame_reg_rtx = frame_ptr_rtx;
f676971a 15608 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
15609 (frame_reg_rtx != sp_reg_rtx
15610 && (info->cr_save_p
15611 || info->lr_save_p
15612 || info->first_fp_reg_save < 64
15613 || info->first_gp_reg_save < 32
15614 )));
15615 if (frame_reg_rtx != sp_reg_rtx)
15616 rs6000_emit_stack_tie ();
15617 }
15618
d62294f5 15619 /* Handle world saves specially here. */
f57fe068 15620 if (WORLD_SAVE_P (info))
d62294f5
FJ
15621 {
15622 int i, j, sz;
15623 rtx treg;
15624 rtvec p;
22fa69da 15625 rtx reg0;
d62294f5
FJ
15626
15627 /* save_world expects lr in r0. */
22fa69da 15628 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 15629 if (info->lr_save_p)
c4ad648e 15630 {
22fa69da 15631 insn = emit_move_insn (reg0,
1de43f85 15632 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
15633 RTX_FRAME_RELATED_P (insn) = 1;
15634 }
d62294f5
FJ
15635
15636 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 15637 assumptions about the offsets of various bits of the stack
992d08b1 15638 frame. */
37409796
NS
15639 gcc_assert (info->gp_save_offset == -220
15640 && info->fp_save_offset == -144
15641 && info->lr_save_offset == 8
15642 && info->cr_save_offset == 4
15643 && info->push_p
15644 && info->lr_save_p
15645 && (!current_function_calls_eh_return
15646 || info->ehrd_offset == -432)
15647 && info->vrsave_save_offset == -224
22fa69da 15648 && info->altivec_save_offset == -416);
d62294f5
FJ
15649
15650 treg = gen_rtx_REG (SImode, 11);
15651 emit_move_insn (treg, GEN_INT (-info->total_size));
15652
15653 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 15654 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
15655
15656 /* Preserve CR2 for save_world prologues */
22fa69da 15657 sz = 5;
d62294f5
FJ
15658 sz += 32 - info->first_gp_reg_save;
15659 sz += 64 - info->first_fp_reg_save;
15660 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
15661 p = rtvec_alloc (sz);
15662 j = 0;
15663 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 15664 gen_rtx_REG (SImode,
1de43f85 15665 LR_REGNO));
d62294f5 15666 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
15667 gen_rtx_SYMBOL_REF (Pmode,
15668 "*save_world"));
d62294f5 15669 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
15670 properly. */
15671 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15672 {
15673 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15674 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15675 GEN_INT (info->fp_save_offset
15676 + sp_offset + 8 * i));
0be76840 15677 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
15678
15679 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15680 }
d62294f5 15681 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
15682 {
15683 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
15684 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15685 GEN_INT (info->altivec_save_offset
15686 + sp_offset + 16 * i));
0be76840 15687 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
15688
15689 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15690 }
d62294f5 15691 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
15692 {
15693 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15694 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15695 GEN_INT (info->gp_save_offset
15696 + sp_offset + reg_size * i));
0be76840 15697 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15698
15699 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15700 }
15701
15702 {
15703 /* CR register traditionally saved as CR2. */
15704 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
15705 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15706 GEN_INT (info->cr_save_offset
15707 + sp_offset));
0be76840 15708 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15709
15710 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15711 }
22fa69da
GK
15712 /* Explain about use of R0. */
15713 if (info->lr_save_p)
15714 {
15715 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15716 GEN_INT (info->lr_save_offset
15717 + sp_offset));
15718 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 15719
22fa69da
GK
15720 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
15721 }
15722 /* Explain what happens to the stack pointer. */
15723 {
15724 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
15725 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
15726 }
d62294f5
FJ
15727
15728 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
15729 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
15730 treg, GEN_INT (-info->total_size));
15731 sp_offset = info->total_size;
d62294f5
FJ
15732 }
15733
9ebbca7d 15734 /* If we use the link register, get it into r0. */
f57fe068 15735 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 15736 {
52ff33d0
NF
15737 rtx addr, reg, mem;
15738
f8a57be8 15739 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 15740 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 15741 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
15742
15743 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15744 GEN_INT (info->lr_save_offset + sp_offset));
15745 reg = gen_rtx_REG (Pmode, 0);
15746 mem = gen_rtx_MEM (Pmode, addr);
15747 /* This should not be of rs6000_sr_alias_set, because of
15748 __builtin_return_address. */
15749
15750 insn = emit_move_insn (mem, reg);
15751 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15752 NULL_RTX, NULL_RTX);
f8a57be8 15753 }
9ebbca7d
GK
15754
15755 /* If we need to save CR, put it into r12. */
f57fe068 15756 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 15757 {
f8a57be8 15758 rtx set;
f676971a 15759
9ebbca7d 15760 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
15761 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15762 RTX_FRAME_RELATED_P (insn) = 1;
15763 /* Now, there's no way that dwarf2out_frame_debug_expr is going
15764 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
15765 But that's OK. All we have to do is specify that _one_ condition
15766 code register is saved in this stack slot. The thrower's epilogue
15767 will then restore all the call-saved registers.
15768 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
15769 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
15770 gen_rtx_REG (SImode, CR2_REGNO));
15771 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15772 set,
15773 REG_NOTES (insn));
9ebbca7d
GK
15774 }
15775
a4f6c312
SS
15776 /* Do any required saving of fpr's. If only one or two to save, do
15777 it ourselves. Otherwise, call function. */
f57fe068 15778 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
15779 {
15780 int i;
15781 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 15782 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 15783 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
15784 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
15785 info->first_fp_reg_save + i,
15786 info->fp_save_offset + sp_offset + 8 * i,
15787 info->total_size);
9ebbca7d 15788 }
f57fe068 15789 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
9ebbca7d
GK
15790 {
15791 int i;
15792 char rname[30];
520a57c8 15793 const char *alloc_rname;
9ebbca7d
GK
15794 rtvec p;
15795 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
f676971a
EC
15796
15797 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
15798 gen_rtx_REG (Pmode,
1de43f85 15799 LR_REGNO));
9ebbca7d
GK
15800 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
15801 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
a8a05998 15802 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
15803 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15804 gen_rtx_SYMBOL_REF (Pmode,
15805 alloc_rname));
15806 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15807 {
15808 rtx addr, reg, mem;
15809 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15810 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a 15811 GEN_INT (info->fp_save_offset
9ebbca7d 15812 + sp_offset + 8*i));
0be76840 15813 mem = gen_frame_mem (DFmode, addr);
9ebbca7d
GK
15814
15815 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
15816 }
15817 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15818 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
15819 NULL_RTX, NULL_RTX);
15820 }
b6c9286a 15821
9ebbca7d
GK
15822 /* Save GPRs. This is done as a PARALLEL if we are using
15823 the store-multiple instructions. */
f57fe068 15824 if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 15825 {
308c142a 15826 rtvec p;
9ebbca7d
GK
15827 int i;
15828 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
15829 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15830 {
15831 rtx addr, reg, mem;
15832 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
15833 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15834 GEN_INT (info->gp_save_offset
15835 + sp_offset
9ebbca7d 15836 + reg_size * i));
0be76840 15837 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
15838
15839 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
15840 }
15841 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15842 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 15843 NULL_RTX, NULL_RTX);
b6c9286a 15844 }
52ff33d0
NF
15845 else if (!WORLD_SAVE_P (info)
15846 && TARGET_SPE_ABI
15847 && info->spe_64bit_regs_used != 0
15848 && info->first_gp_reg_save != 32)
15849 {
15850 int i;
15851 rtx spe_save_area_ptr;
15852 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
6fb5fa3c 15853 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
52ff33d0
NF
15854 && !call_used_regs[STATIC_CHAIN_REGNUM]);
15855
15856 /* Determine whether we can address all of the registers that need
15857 to be saved with an offset from the stack pointer that fits in
15858 the small const field for SPE memory instructions. */
15859 int spe_regs_addressable_via_sp
15860 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
15861 + (32 - info->first_gp_reg_save - 1) * reg_size);
15862 int spe_offset;
15863
15864 if (spe_regs_addressable_via_sp)
15865 {
30895f30 15866 spe_save_area_ptr = frame_reg_rtx;
52ff33d0
NF
15867 spe_offset = info->spe_gp_save_offset + sp_offset;
15868 }
15869 else
15870 {
15871 /* Make r11 point to the start of the SPE save area. We need
15872 to be careful here if r11 is holding the static chain. If
15873 it is, then temporarily save it in r0. We would use r0 as
15874 our base register here, but using r0 as a base register in
15875 loads and stores means something different from what we
15876 would like. */
15877 if (using_static_chain_p)
15878 {
15879 rtx r0 = gen_rtx_REG (Pmode, 0);
15880
15881 gcc_assert (info->first_gp_reg_save > 11);
15882
15883 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
15884 }
15885
15886 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
30895f30 15887 emit_insn (gen_addsi3 (spe_save_area_ptr, frame_reg_rtx,
52ff33d0
NF
15888 GEN_INT (info->spe_gp_save_offset + sp_offset)));
15889
15890 spe_offset = 0;
15891 }
15892
15893 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15894 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15895 {
15896 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15897 rtx offset, addr, mem;
15898
15899 /* We're doing all this to ensure that the offset fits into
15900 the immediate offset of 'evstdd'. */
15901 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
15902
15903 offset = GEN_INT (reg_size * i + spe_offset);
15904 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
15905 mem = gen_rtx_MEM (V2SImode, addr);
15906
15907 insn = emit_move_insn (mem, reg);
15908
15909 rs6000_frame_related (insn, spe_save_area_ptr,
15910 info->spe_gp_save_offset
15911 + sp_offset + reg_size * i,
15912 offset, const0_rtx);
15913 }
15914
15915 /* Move the static chain pointer back. */
15916 if (using_static_chain_p && !spe_regs_addressable_via_sp)
15917 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
15918 }
f57fe068 15919 else if (!WORLD_SAVE_P (info))
b6c9286a 15920 {
9ebbca7d
GK
15921 int i;
15922 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
15923 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15924 {
15925 rtx addr, reg, mem;
15926 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 15927
52ff33d0
NF
15928 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15929 GEN_INT (info->gp_save_offset
15930 + sp_offset
15931 + reg_size * i));
15932 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 15933
52ff33d0
NF
15934 insn = emit_move_insn (mem, reg);
15935 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15936 NULL_RTX, NULL_RTX);
15937 }
9ebbca7d
GK
15938 }
15939
83720594
RH
15940 /* ??? There's no need to emit actual instructions here, but it's the
15941 easiest way to get the frame unwind information emitted. */
22fa69da 15942 if (current_function_calls_eh_return)
83720594 15943 {
78e1b90d
DE
15944 unsigned int i, regno;
15945
fc4767bb
JJ
15946 /* In AIX ABI we need to pretend we save r2 here. */
15947 if (TARGET_AIX)
15948 {
15949 rtx addr, reg, mem;
15950
15951 reg = gen_rtx_REG (reg_mode, 2);
15952 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15953 GEN_INT (sp_offset + 5 * reg_size));
0be76840 15954 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
15955
15956 insn = emit_move_insn (mem, reg);
f676971a 15957 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
15958 NULL_RTX, NULL_RTX);
15959 PATTERN (insn) = gen_blockage ();
15960 }
15961
83720594
RH
15962 for (i = 0; ; ++i)
15963 {
83720594
RH
15964 regno = EH_RETURN_DATA_REGNO (i);
15965 if (regno == INVALID_REGNUM)
15966 break;
15967
89e7058f
AH
15968 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
15969 info->ehrd_offset + sp_offset
15970 + reg_size * (int) i,
15971 info->total_size);
83720594
RH
15972 }
15973 }
15974
9ebbca7d 15975 /* Save CR if we use any that must be preserved. */
f57fe068 15976 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
15977 {
15978 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15979 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 15980 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
15981 /* See the large comment above about why CR2_REGNO is used. */
15982 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 15983
9ebbca7d
GK
15984 /* If r12 was used to hold the original sp, copy cr into r0 now
15985 that it's free. */
15986 if (REGNO (frame_reg_rtx) == 12)
15987 {
f8a57be8
GK
15988 rtx set;
15989
9ebbca7d 15990 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
15991 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15992 RTX_FRAME_RELATED_P (insn) = 1;
15993 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
15994 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15995 set,
15996 REG_NOTES (insn));
f676971a 15997
9ebbca7d
GK
15998 }
15999 insn = emit_move_insn (mem, cr_save_rtx);
16000
f676971a 16001 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 16002 NULL_RTX, NULL_RTX);
9ebbca7d
GK
16003 }
16004
f676971a 16005 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 16006 for which it was done previously. */
f57fe068 16007 if (!WORLD_SAVE_P (info) && info->push_p
fc4767bb 16008 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
2b2c2fe5 16009 {
bcb2d701 16010 if (info->total_size < 32767)
2b2c2fe5 16011 sp_offset = info->total_size;
bcb2d701
EC
16012 else
16013 frame_reg_rtx = frame_ptr_rtx;
16014 rs6000_emit_allocate_stack (info->total_size,
16015 (frame_reg_rtx != sp_reg_rtx
16016 && ((info->altivec_size != 0)
16017 || (info->vrsave_mask != 0)
16018 )));
16019 if (frame_reg_rtx != sp_reg_rtx)
16020 rs6000_emit_stack_tie ();
2b2c2fe5 16021 }
9ebbca7d
GK
16022
16023 /* Set frame pointer, if needed. */
16024 if (frame_pointer_needed)
16025 {
7d5175e1 16026 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
16027 sp_reg_rtx);
16028 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 16029 }
9878760c 16030
2b2c2fe5
EC
16031 /* Save AltiVec registers if needed. Save here because the red zone does
16032 not include AltiVec registers. */
16033 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16034 {
16035 int i;
16036
16037 /* There should be a non inline version of this, for when we
16038 are saving lots of vector registers. */
16039 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16040 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16041 {
16042 rtx areg, savereg, mem;
16043 int offset;
16044
16045 offset = info->altivec_save_offset + sp_offset
16046 + 16 * (i - info->first_altivec_reg_save);
16047
16048 savereg = gen_rtx_REG (V4SImode, i);
16049
16050 areg = gen_rtx_REG (Pmode, 0);
16051 emit_move_insn (areg, GEN_INT (offset));
16052
16053 /* AltiVec addressing mode is [reg+reg]. */
16054 mem = gen_frame_mem (V4SImode,
16055 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
16056
16057 insn = emit_move_insn (mem, savereg);
16058
16059 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16060 areg, GEN_INT (offset));
16061 }
16062 }
16063
16064 /* VRSAVE is a bit vector representing which AltiVec registers
16065 are used. The OS uses this to determine which vector
16066 registers to save on a context switch. We need to save
16067 VRSAVE on the stack frame, add whatever AltiVec registers we
16068 used in this function, and do the corresponding magic in the
16069 epilogue. */
16070
16071 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16072 && info->vrsave_mask != 0)
16073 {
16074 rtx reg, mem, vrsave;
16075 int offset;
16076
16077 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
16078 as frame_reg_rtx and r11 as the static chain pointer for
16079 nested functions. */
16080 reg = gen_rtx_REG (SImode, 0);
16081 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
16082 if (TARGET_MACHO)
16083 emit_insn (gen_get_vrsave_internal (reg));
16084 else
16085 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
16086
16087 if (!WORLD_SAVE_P (info))
16088 {
16089 /* Save VRSAVE. */
16090 offset = info->vrsave_save_offset + sp_offset;
16091 mem = gen_frame_mem (SImode,
16092 gen_rtx_PLUS (Pmode, frame_reg_rtx,
16093 GEN_INT (offset)));
16094 insn = emit_move_insn (mem, reg);
16095 }
16096
16097 /* Include the registers in the mask. */
16098 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
16099
16100 insn = emit_insn (generate_set_vrsave (reg, info, 0));
16101 }
16102
1db02437 16103 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 16104 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
16105 || (DEFAULT_ABI == ABI_V4
16106 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 16107 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
16108 {
16109 /* If emit_load_toc_table will use the link register, we need to save
16110 it. We use R12 for this purpose because emit_load_toc_table
16111 can use register 0. This allows us to use a plain 'blr' to return
16112 from the procedure more often. */
16113 int save_LR_around_toc_setup = (TARGET_ELF
16114 && DEFAULT_ABI != ABI_AIX
16115 && flag_pic
16116 && ! info->lr_save_p
16117 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
16118 if (save_LR_around_toc_setup)
16119 {
1de43f85 16120 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 16121
c4ad648e 16122 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 16123 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 16124
c4ad648e 16125 rs6000_emit_load_toc_table (TRUE);
f8a57be8 16126
c4ad648e 16127 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
16128 RTX_FRAME_RELATED_P (insn) = 1;
16129 }
16130 else
16131 rs6000_emit_load_toc_table (TRUE);
16132 }
ee890fe2 16133
fcce224d 16134#if TARGET_MACHO
ee890fe2
SS
16135 if (DEFAULT_ABI == ABI_DARWIN
16136 && flag_pic && current_function_uses_pic_offset_table)
16137 {
1de43f85 16138 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
11abc112 16139 rtx src = machopic_function_base_sym ();
ee890fe2 16140
6d0a8091
DJ
16141 /* Save and restore LR locally around this call (in R0). */
16142 if (!info->lr_save_p)
6fb5fa3c 16143 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 16144
6fb5fa3c 16145 emit_insn (gen_load_macho_picbase (src));
ee890fe2 16146
6fb5fa3c
DB
16147 emit_move_insn (gen_rtx_REG (Pmode,
16148 RS6000_PIC_OFFSET_TABLE_REGNUM),
16149 lr);
6d0a8091
DJ
16150
16151 if (!info->lr_save_p)
6fb5fa3c 16152 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 16153 }
fcce224d 16154#endif
9ebbca7d
GK
16155}
16156
/* Write function prologue.  */

static void
rs6000_output_function_prologue (FILE *file,
				 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
	     RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      common_mode_defined = 1;
    }

  if (! HAVE_prologue)
    {
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
	 the "toplevel" insn chain.  */
      emit_note (NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash.  */
      {
	rtx insn;
	unsigned addr = 0;
	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	  {
	    INSN_ADDRESSES_NEW (insn, addr);
	    addr += 4;
	  }
      }

      if (TARGET_DEBUG_STACK)
	debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE);
      end_sequence ();
    }

  rs6000_pic_labelno++;
}

9ebbca7d 16219/* Emit function epilogue as insns.
9878760c 16220
9ebbca7d
GK
16221 At present, dwarf2out_frame_debug_expr doesn't understand
16222 register restores, so we don't bother setting RTX_FRAME_RELATED_P
16223 anywhere in the epilogue. Most of the insns below would in any case
16224 need special notes to explain where r11 is in relation to the stack. */
9878760c 16225
9ebbca7d 16226void
a2369ed3 16227rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
16228{
16229 rs6000_stack_t *info;
16230 int restoring_FPRs_inline;
16231 int using_load_multiple;
d296e02e 16232 int using_mtcr_multiple;
9ebbca7d
GK
16233 int use_backchain_to_restore_sp;
16234 int sp_offset = 0;
16235 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
16236 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 16237 enum machine_mode reg_mode = Pmode;
327e5343 16238 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16239 int i;
16240
c19de7aa
AH
16241 info = rs6000_stack_info ();
16242
16243 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
16244 {
16245 reg_mode = V2SImode;
16246 reg_size = 8;
16247 }
16248
9ebbca7d 16249 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
16250 && (!TARGET_SPE_ABI
16251 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
16252 && info->first_gp_reg_save < 31
16253 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 16254 restoring_FPRs_inline = (sibcall
83720594 16255 || current_function_calls_eh_return
9ebbca7d
GK
16256 || info->first_fp_reg_save == 64
16257 || FP_SAVE_INLINE (info->first_fp_reg_save));
f676971a 16258 use_backchain_to_restore_sp = (frame_pointer_needed
9ebbca7d
GK
16259 || current_function_calls_alloca
16260 || info->total_size > 32767);
d296e02e 16261 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
16262 || rs6000_cpu == PROCESSOR_PPC603
16263 || rs6000_cpu == PROCESSOR_PPC750
16264 || optimize_size);
16265
f57fe068 16266 if (WORLD_SAVE_P (info))
d62294f5
FJ
16267 {
16268 int i, j;
16269 char rname[30];
16270 const char *alloc_rname;
16271 rtvec p;
16272
16273 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
16274 stack slot (which is not likely to be our caller.)
16275 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
16276 rest_world is similar, except any R10 parameter is ignored.
16277 The exception-handling stuff that was here in 2.95 is no
16278 longer necessary. */
d62294f5
FJ
16279
16280 p = rtvec_alloc (9
16281 + 1
f676971a 16282 + 32 - info->first_gp_reg_save
c4ad648e
AM
16283 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
16284 + 63 + 1 - info->first_fp_reg_save);
d62294f5 16285
c4ad648e
AM
16286 strcpy (rname, ((current_function_calls_eh_return) ?
16287 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
16288 alloc_rname = ggc_strdup (rname);
16289
16290 j = 0;
16291 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
16292 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 16293 gen_rtx_REG (Pmode,
1de43f85 16294 LR_REGNO));
d62294f5 16295 RTVEC_ELT (p, j++)
c4ad648e 16296 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 16297 /* The instruction pattern requires a clobber here;
c4ad648e 16298 it is shared with the restVEC helper. */
d62294f5 16299 RTVEC_ELT (p, j++)
c4ad648e 16300 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
16301
16302 {
c4ad648e
AM
16303 /* CR register traditionally saved as CR2. */
16304 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16305 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16306 GEN_INT (info->cr_save_offset));
0be76840 16307 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16308
16309 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
16310 }
16311
16312 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16313 {
16314 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16315 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16316 GEN_INT (info->gp_save_offset
16317 + reg_size * i));
0be76840 16318 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16319
16320 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16321 }
d62294f5 16322 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16323 {
16324 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16325 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16326 GEN_INT (info->altivec_save_offset
16327 + 16 * i));
0be76840 16328 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16329
16330 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16331 }
d62294f5 16332 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
16333 {
16334 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
16335 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16336 GEN_INT (info->fp_save_offset
16337 + 8 * i));
0be76840 16338 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
16339
16340 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16341 }
d62294f5 16342 RTVEC_ELT (p, j++)
c4ad648e 16343 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 16344 RTVEC_ELT (p, j++)
c4ad648e 16345 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 16346 RTVEC_ELT (p, j++)
c4ad648e 16347 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 16348 RTVEC_ELT (p, j++)
c4ad648e 16349 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 16350 RTVEC_ELT (p, j++)
c4ad648e 16351 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
16352 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
16353
16354 return;
16355 }
16356
45b194f8
AM
16357 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
16358 if (info->push_p)
2b2c2fe5 16359 sp_offset = info->total_size;
f676971a 16360
9aa86737
AH
16361 /* Restore AltiVec registers if needed. */
16362 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16363 {
16364 int i;
16365
16366 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16367 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16368 {
16369 rtx addr, areg, mem;
16370
16371 areg = gen_rtx_REG (Pmode, 0);
16372 emit_move_insn
16373 (areg, GEN_INT (info->altivec_save_offset
16374 + sp_offset
16375 + 16 * (i - info->first_altivec_reg_save)));
16376
16377 /* AltiVec addressing mode is [reg+reg]. */
16378 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 16379 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
16380
16381 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
16382 }
16383 }
16384
2b2c2fe5
EC
16385 /* If we have a frame pointer, a call to alloca, or a large stack
16386 frame, restore the old stack pointer using the backchain. Otherwise,
16387 we know what size to update it with. */
16388 if (use_backchain_to_restore_sp)
16389 {
16390 /* Under V.4, don't reset the stack pointer until after we're done
16391 loading the saved registers. */
16392 if (DEFAULT_ABI == ABI_V4)
16393 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16394
16395 emit_move_insn (frame_reg_rtx,
16396 gen_rtx_MEM (Pmode, sp_reg_rtx));
45b194f8 16397 sp_offset = 0;
2b2c2fe5 16398 }
45b194f8
AM
16399 else if (info->push_p
16400 && DEFAULT_ABI != ABI_V4
16401 && !current_function_calls_eh_return)
2b2c2fe5 16402 {
45b194f8
AM
16403 emit_insn (TARGET_32BIT
16404 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16405 GEN_INT (info->total_size))
16406 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16407 GEN_INT (info->total_size)));
16408 sp_offset = 0;
2b2c2fe5
EC
16409 }
16410
554c2941
AM
16411 /* Restore VRSAVE if needed. */
16412 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16413 && info->vrsave_mask != 0)
16414 {
16415 rtx addr, mem, reg;
16416
16417 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16418 GEN_INT (info->vrsave_save_offset + sp_offset));
16419 mem = gen_frame_mem (SImode, addr);
16420 reg = gen_rtx_REG (SImode, 12);
16421 emit_move_insn (reg, mem);
16422
16423 emit_insn (generate_set_vrsave (reg, info, 1));
16424 }
16425
9ebbca7d
GK
16426 /* Get the old lr if we saved it. */
16427 if (info->lr_save_p)
b6c9286a 16428 {
a3170dc6
AH
16429 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
16430 info->lr_save_offset + sp_offset);
ba4828e0 16431
9ebbca7d 16432 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 16433 }
f676971a 16434
9ebbca7d
GK
16435 /* Get the old cr if we saved it. */
16436 if (info->cr_save_p)
16437 {
16438 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16439 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16440 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 16441
9ebbca7d
GK
16442 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
16443 }
f676971a 16444
9ebbca7d 16445 /* Set LR here to try to overlap restores below. */
4697a36c 16446 if (info->lr_save_p)
1de43f85 16447 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 16448 gen_rtx_REG (Pmode, 0));
f676971a 16449
83720594
RH
16450 /* Load exception handler data registers, if needed. */
16451 if (current_function_calls_eh_return)
16452 {
78e1b90d
DE
16453 unsigned int i, regno;
16454
fc4767bb
JJ
16455 if (TARGET_AIX)
16456 {
16457 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16458 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16459 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16460
16461 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
16462 }
16463
83720594
RH
16464 for (i = 0; ; ++i)
16465 {
a3170dc6 16466 rtx mem;
83720594
RH
16467
16468 regno = EH_RETURN_DATA_REGNO (i);
16469 if (regno == INVALID_REGNUM)
16470 break;
16471
a3170dc6
AH
16472 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
16473 info->ehrd_offset + sp_offset
16474 + reg_size * (int) i);
83720594
RH
16475
16476 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
16477 }
16478 }
f676971a 16479
9ebbca7d
GK
16480 /* Restore GPRs. This is done as a PARALLEL if we are using
16481 the load-multiple instructions. */
16482 if (using_load_multiple)
979721f8 16483 {
9ebbca7d
GK
16484 rtvec p;
16485 p = rtvec_alloc (32 - info->first_gp_reg_save);
16486 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
979721f8 16487 {
f676971a
EC
16488 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16489 GEN_INT (info->gp_save_offset
16490 + sp_offset
9ebbca7d 16491 + reg_size * i));
0be76840 16492 rtx mem = gen_frame_mem (reg_mode, addr);
9ebbca7d 16493
f676971a 16494 RTVEC_ELT (p, i) =
9ebbca7d
GK
16495 gen_rtx_SET (VOIDmode,
16496 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16497 mem);
979721f8 16498 }
9ebbca7d 16499 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
979721f8 16500 }
52ff33d0
NF
16501 else if (TARGET_SPE_ABI
16502 && info->spe_64bit_regs_used != 0
16503 && info->first_gp_reg_save != 32)
16504 {
52ff33d0
NF
16505 /* Determine whether we can address all of the registers that need
16506 to be saved with an offset from the stack pointer that fits in
16507 the small const field for SPE memory instructions. */
16508 int spe_regs_addressable_via_sp
16509 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16510 + (32 - info->first_gp_reg_save - 1) * reg_size);
16511 int spe_offset;
16512
16513 if (spe_regs_addressable_via_sp)
45b194f8 16514 spe_offset = info->spe_gp_save_offset + sp_offset;
52ff33d0
NF
16515 else
16516 {
45b194f8 16517 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 16518 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 16519 not clobbering it when we were saving registers in the prologue.
52ff33d0
NF
16520 There's no need to worry here because the static chain is passed
16521 anew to every function. */
45b194f8
AM
16522 if (frame_reg_rtx == sp_reg_rtx)
16523 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16524 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
52ff33d0 16525 GEN_INT (info->spe_gp_save_offset + sp_offset)));
45b194f8
AM
16526 /* Keep the invariant that frame_reg_rtx + sp_offset points
16527 at the top of the stack frame. */
16528 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
16529
16530 spe_offset = 0;
16531 }
16532
16533 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16534 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16535 {
16536 rtx offset, addr, mem;
16537
16538 /* We're doing all this to ensure that the immediate offset
16539 fits into the immediate field of 'evldd'. */
16540 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
16541
16542 offset = GEN_INT (spe_offset + reg_size * i);
45b194f8 16543 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
52ff33d0
NF
16544 mem = gen_rtx_MEM (V2SImode, addr);
16545
16546 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16547 mem);
16548 }
16549 }
9ebbca7d
GK
16550 else
16551 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0 16552 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
9ebbca7d 16553 {
f676971a
EC
16554 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16555 GEN_INT (info->gp_save_offset
16556 + sp_offset
9ebbca7d 16557 + reg_size * i));
0be76840 16558 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 16559
f676971a 16560 emit_move_insn (gen_rtx_REG (reg_mode,
a3170dc6 16561 info->first_gp_reg_save + i), mem);
9ebbca7d 16562 }
9878760c 16563
9ebbca7d
GK
16564 /* Restore fpr's if we need to do it without calling a function. */
16565 if (restoring_FPRs_inline)
16566 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16567 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
16568 && ! call_used_regs[info->first_fp_reg_save+i]))
16569 {
16570 rtx addr, mem;
16571 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
16572 GEN_INT (info->fp_save_offset
16573 + sp_offset
a4f6c312 16574 + 8 * i));
0be76840 16575 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16576
f676971a 16577 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
16578 info->first_fp_reg_save + i),
16579 mem);
16580 }
8d30c4ee 16581
9ebbca7d
GK
16582 /* If we saved cr, restore it here -- but only those CR fields that were used. */
16583 if (info->cr_save_p)
979721f8 16584 {
9ebbca7d 16585 rtx r12_rtx = gen_rtx_REG (SImode, 12);
e35b9579 16586 int count = 0;
f676971a 16587
d296e02e 16588 if (using_mtcr_multiple)
979721f8 16589 {
9ebbca7d 16590 for (i = 0; i < 8; i++)
6fb5fa3c 16591 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
e35b9579 16592 count++;
37409796 16593 gcc_assert (count);
e35b9579
GK
16594 }
16595
d296e02e 16596 if (using_mtcr_multiple && count > 1)
e35b9579
GK
16597 {
16598 rtvec p;
16599 int ndx;
f676971a 16600
e35b9579 16601 p = rtvec_alloc (count);
9ebbca7d 16602
e35b9579 16603 ndx = 0;
9ebbca7d 16604 for (i = 0; i < 8; i++)
6fb5fa3c 16605 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
9ebbca7d
GK
16606 {
16607 rtvec r = rtvec_alloc (2);
16608 RTVEC_ELT (r, 0) = r12_rtx;
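 /* mtcrf selects CR field i with mask bit 7-i (CR0 corresponds to the
    most-significant mask bit), hence the 1 << (7-i) below. */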
16609 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
e35b9579 16610 RTVEC_ELT (p, ndx) =
f676971a 16611 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
615158e2 16612 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
e35b9579 16613 ndx++;
9ebbca7d
GK
16614 }
16615 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
37409796 16616 gcc_assert (ndx == count);
979721f8
MM
16617 }
16618 else
9ebbca7d 16619 for (i = 0; i < 8; i++)
6fb5fa3c 16620 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
979721f8 16621 {
f676971a 16622 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9ebbca7d
GK
16623 CR0_REGNO+i),
16624 r12_rtx));
979721f8 16625 }
979721f8
MM
16626 }
16627
9ebbca7d 16628 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6
AM
16629 have been done. */
16630 if (frame_reg_rtx != sp_reg_rtx)
16631 {
16632 /* This blockage is needed so that sched doesn't decide to move
16633 the sp change before the register restores. */
16634 rs6000_emit_stack_tie ();
45b194f8
AM
16635 if (sp_offset != 0)
16636 emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
16637 GEN_INT (sp_offset)));
52ff33d0
NF
16638 else
16639 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
022123e6
AM
16640 }
16641 else if (sp_offset != 0)
16642 emit_insn (TARGET_32BIT
16643 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16644 GEN_INT (sp_offset))
16645 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16646 GEN_INT (sp_offset)));
b6c9286a 16647
83720594
RH
16648 if (current_function_calls_eh_return)
16649 {
16650 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 16651 emit_insn (TARGET_32BIT
83720594
RH
16652 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
16653 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
16654 }
16655
9ebbca7d
GK
16656 if (!sibcall)
16657 {
16658 rtvec p;
16659 if (! restoring_FPRs_inline)
16660 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
16661 else
16662 p = rtvec_alloc (2);
b6c9286a 16663
e35b9579 16664 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f676971a
EC
16665 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
16666 gen_rtx_REG (Pmode,
1de43f85 16667 LR_REGNO));
9ebbca7d
GK
16668
16669 /* If we have to restore more than two FP registers, branch to the
16670 restore function. It will return to our caller. */
16671 if (! restoring_FPRs_inline)
16672 {
16673 int i;
16674 char rname[30];
520a57c8 16675 const char *alloc_rname;
979721f8 16676
f676971a 16677 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9ebbca7d 16678 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
a8a05998 16679 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
16680 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
16681 gen_rtx_SYMBOL_REF (Pmode,
16682 alloc_rname));
b6c9286a 16683
9ebbca7d
GK
16684 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16685 {
16686 rtx addr, mem;
16687 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
16688 GEN_INT (info->fp_save_offset + 8*i));
0be76840 16689 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16690
f676971a 16691 RTVEC_ELT (p, i+3) =
9ebbca7d
GK
16692 gen_rtx_SET (VOIDmode,
16693 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
16694 mem);
b6c9286a
MM
16695 }
16696 }
f676971a 16697
9ebbca7d 16698 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 16699 }
9878760c
RK
16700}
16701
16702/* Write function epilogue. */
16703
08c148a8 16704static void
f676971a 16705rs6000_output_function_epilogue (FILE *file,
a2369ed3 16706 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 16707{
9ebbca7d 16708 if (! HAVE_epilogue)
9878760c 16709 {
9ebbca7d
GK
16710 rtx insn = get_last_insn ();
16711 /* If the last insn was a BARRIER, we don't have to write anything except
16712 the trace table. */
16713 if (GET_CODE (insn) == NOTE)
16714 insn = prev_nonnote_insn (insn);
16715 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 16716 {
9ebbca7d
GK
16717 /* This is slightly ugly, but at least we don't have two
16718 copies of the epilogue-emitting code. */
16719 start_sequence ();
16720
16721 /* A NOTE_INSN_DELETED is supposed to be at the start
16722 and end of the "toplevel" insn chain. */
2e040219 16723 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16724 rs6000_emit_epilogue (FALSE);
2e040219 16725 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16726
a3c9585f 16727 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16728 {
16729 rtx insn;
16730 unsigned addr = 0;
16731 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16732 {
16733 INSN_ADDRESSES_NEW (insn, addr);
16734 addr += 4;
16735 }
16736 }
16737
9ebbca7d 16738 if (TARGET_DEBUG_STACK)
a4f6c312 16739 debug_rtx_list (get_insns (), 100);
c9d691e9 16740 final (get_insns (), file, FALSE);
9ebbca7d 16741 end_sequence ();
4697a36c 16742 }
9878760c 16743 }
b4ac57ab 16744
efdba735
SH
16745#if TARGET_MACHO
16746 macho_branch_islands ();
0e5da0be
GK
16747 /* Mach-O doesn't support labels at the end of objects, so if
16748 it looks like we might want one, insert a NOP. */
16749 {
16750 rtx insn = get_last_insn ();
16751 while (insn
16752 && NOTE_P (insn)
a38e7aa5 16753 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 16754 insn = PREV_INSN (insn);
f676971a
EC
16755 if (insn
16756 && (LABEL_P (insn)
0e5da0be 16757 || (NOTE_P (insn)
a38e7aa5 16758 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
16759 fputs ("\tnop\n", file);
16760 }
16761#endif
16762
9b30bae2 16763 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
16764 on its format.
16765
16766 We don't output a traceback table if -finhibit-size-directive was
16767 used. The documentation for -finhibit-size-directive reads
16768 ``don't output a @code{.size} assembler directive, or anything
16769 else that would cause trouble if the function is split in the
16770 middle, and the two halves are placed at locations far apart in
16771 memory.'' The traceback table has this property, since it
16772 includes the offset from the start of the function to the
4d30c363
MM
16773 traceback table itself.
16774
16775 System V.4 Powerpc's (and the embedded ABI derived from it) use a
b6c9286a 16776 different traceback table. */
57ac7be9 16777 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
8097c268 16778 && rs6000_traceback != traceback_none && !current_function_is_thunk)
9b30bae2 16779 {
69c75916 16780 const char *fname = NULL;
3ac88239 16781 const char *language_string = lang_hooks.name;
6041bf2f 16782 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 16783 int i;
57ac7be9 16784 int optional_tbtab;
8097c268 16785 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
16786
16787 if (rs6000_traceback == traceback_full)
16788 optional_tbtab = 1;
16789 else if (rs6000_traceback == traceback_part)
16790 optional_tbtab = 0;
16791 else
16792 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 16793
69c75916
AM
16794 if (optional_tbtab)
16795 {
16796 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
16797 while (*fname == '.') /* V.4 encodes . in the name */
16798 fname++;
16799
16800 /* Need label immediately before tbtab, so we can compute
16801 its offset from the function start. */
16802 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
16803 ASM_OUTPUT_LABEL (file, fname);
16804 }
314fc5a9
ILT
16805
16806 /* The .tbtab pseudo-op can only be used for the first eight
16807 expressions, since it can't handle the possibly variable
16808 length fields that follow. However, if you omit the optional
16809 fields, the assembler outputs zeros for all optional fields
16810 anyway, giving each variable-length field its minimum length
16811 (as defined in sys/debug.h). Thus we cannot use the .tbtab
16812 pseudo-op at all. */
16813
16814 /* An all-zero word flags the start of the tbtab, for debuggers
16815 that have to find it by searching forward from the entry
16816 point or from the current pc. */
19d2d16f 16817 fputs ("\t.long 0\n", file);
314fc5a9
ILT
16818
16819 /* Tbtab format type. Use format type 0. */
19d2d16f 16820 fputs ("\t.byte 0,", file);
314fc5a9 16821
5fc921c1
DE
16822 /* Language type. Unfortunately, there does not seem to be any
16823 official way to discover the language being compiled, so we
16824 use language_string.
16825 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
16826 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
16827 a number, so for now use 9. */
5fc921c1 16828 if (! strcmp (language_string, "GNU C"))
314fc5a9 16829 i = 0;
6de9cd9a
DN
16830 else if (! strcmp (language_string, "GNU F77")
16831 || ! strcmp (language_string, "GNU F95"))
314fc5a9 16832 i = 1;
8b83775b 16833 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 16834 i = 2;
5fc921c1
DE
16835 else if (! strcmp (language_string, "GNU Ada"))
16836 i = 3;
56438901
AM
16837 else if (! strcmp (language_string, "GNU C++")
16838 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 16839 i = 9;
9517ead8
AG
16840 else if (! strcmp (language_string, "GNU Java"))
16841 i = 13;
5fc921c1
DE
16842 else if (! strcmp (language_string, "GNU Objective-C"))
16843 i = 14;
314fc5a9 16844 else
37409796 16845 gcc_unreachable ();
314fc5a9
ILT
16846 fprintf (file, "%d,", i);
16847
16848 /* 8 single bit fields: global linkage (not set for C extern linkage,
16849 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
16850 from start of procedure stored in tbtab, internal function, function
16851 has controlled storage, function has no toc, function uses fp,
16852 function logs/aborts fp operations. */
16853 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
16854 fprintf (file, "%d,",
16855 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
16856
16857 /* 6 bitfields: function is interrupt handler, name present in
16858 proc table, function calls alloca, on condition directives
16859 (controls stack walks, 3 bits), saves condition reg, saves
16860 link reg. */
16861 /* The `function calls alloca' bit seems to be set whenever reg 31 is
16862 set up as a frame pointer, even when there is no alloca call. */
16863 fprintf (file, "%d,",
6041bf2f
DE
16864 ((optional_tbtab << 6)
16865 | ((optional_tbtab & frame_pointer_needed) << 5)
16866 | (info->cr_save_p << 1)
16867 | (info->lr_save_p)));
314fc5a9 16868
6041bf2f 16869 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
16870 (6 bits). */
16871 fprintf (file, "%d,",
4697a36c 16872 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
16873
16874 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
16875 fprintf (file, "%d,", (32 - first_reg_to_save ()));
16876
6041bf2f
DE
16877 if (optional_tbtab)
16878 {
16879 /* Compute the parameter info from the function decl argument
16880 list. */
16881 tree decl;
16882 int next_parm_info_bit = 31;
314fc5a9 16883
6041bf2f
DE
16884 for (decl = DECL_ARGUMENTS (current_function_decl);
16885 decl; decl = TREE_CHAIN (decl))
16886 {
16887 rtx parameter = DECL_INCOMING_RTL (decl);
16888 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 16889
6041bf2f
DE
16890 if (GET_CODE (parameter) == REG)
16891 {
ebb109ad 16892 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
16893 {
16894 int bits;
16895
16896 float_parms++;
16897
37409796
NS
16898 switch (mode)
16899 {
16900 case SFmode:
e41b2a33 16901 case SDmode:
37409796
NS
16902 bits = 0x2;
16903 break;
16904
16905 case DFmode:
7393f7f8 16906 case DDmode:
37409796 16907 case TFmode:
7393f7f8 16908 case TDmode:
37409796
NS
16909 bits = 0x3;
16910 break;
16911
16912 default:
16913 gcc_unreachable ();
16914 }
6041bf2f
DE
16915
16916 /* If only one bit will fit, don't or in this entry. */
16917 if (next_parm_info_bit > 0)
16918 parm_info |= (bits << (next_parm_info_bit - 1));
16919 next_parm_info_bit -= 2;
16920 }
16921 else
16922 {
16923 fixed_parms += ((GET_MODE_SIZE (mode)
16924 + (UNITS_PER_WORD - 1))
16925 / UNITS_PER_WORD);
16926 next_parm_info_bit -= 1;
16927 }
16928 }
16929 }
16930 }
314fc5a9
ILT
16931
16932 /* Number of fixed point parameters. */
16933 /* This is actually the number of words of fixed point parameters; thus
16934 an 8 byte struct counts as 2; and thus the maximum value is 8. */
16935 fprintf (file, "%d,", fixed_parms);
16936
16937 /* 2 bitfields: number of floating point parameters (7 bits), parameters
16938 all on stack. */
16939 /* This is actually the number of fp registers that hold parameters;
16940 and thus the maximum value is 13. */
16941 /* Set parameters on stack bit if parameters are not in their original
16942 registers, regardless of whether they are on the stack? Xlc
16943 seems to set the bit when not optimizing. */
16944 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
16945
6041bf2f
DE
16946 if (! optional_tbtab)
16947 return;
16948
314fc5a9
ILT
16949 /* Optional fields follow. Some are variable length. */
16950
16951 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
16952 11 double float. */
16953 /* There is an entry for each parameter in a register, in the order that
16954 they occur in the parameter list. Any intervening arguments on the
16955 stack are ignored. If the list overflows a long (max possible length
16956 34 bits) then completely leave off all elements that don't fit. */
16957 /* Only emit this long if there was at least one parameter. */
16958 if (fixed_parms || float_parms)
16959 fprintf (file, "\t.long %d\n", parm_info);
16960
16961 /* Offset from start of code to tb table. */
19d2d16f 16962 fputs ("\t.long ", file);
314fc5a9 16963 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
16964 if (TARGET_AIX)
16965 RS6000_OUTPUT_BASENAME (file, fname);
16966 else
16967 assemble_name (file, fname);
16968 putc ('-', file);
16969 rs6000_output_function_entry (file, fname);
19d2d16f 16970 putc ('\n', file);
314fc5a9
ILT
16971
16972 /* Interrupt handler mask. */
16973 /* Omit this long, since we never set the interrupt handler bit
16974 above. */
16975
16976 /* Number of CTL (controlled storage) anchors. */
16977 /* Omit this long, since the has_ctl bit is never set above. */
16978
16979 /* Displacement into stack of each CTL anchor. */
16980 /* Omit this list of longs, because there are no CTL anchors. */
16981
16982 /* Length of function name. */
69c75916
AM
16983 if (*fname == '*')
16984 ++fname;
296b8152 16985 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
16986
16987 /* Function name. */
16988 assemble_string (fname, strlen (fname));
16989
16990 /* Register for alloca automatic storage; this is always reg 31.
16991 Only emit this if the alloca bit was set above. */
16992 if (frame_pointer_needed)
19d2d16f 16993 fputs ("\t.byte 31\n", file);
b1765bde
DE
16994
16995 fputs ("\t.align 2\n", file);
9b30bae2 16996 }
9878760c 16997}
17167fd8 16998\f
a4f6c312
SS
16999/* A C compound statement that outputs the assembler code for a thunk
17000 function, used to implement C++ virtual function calls with
17001 multiple inheritance. The thunk acts as a wrapper around a virtual
17002 function, adjusting the implicit object parameter before handing
17003 control off to the real function.
17004
17005 First, emit code to add the integer DELTA to the location that
17006 contains the incoming first argument. Assume that this argument
17007 contains a pointer, and is the one used to pass the `this' pointer
17008 in C++. This is the incoming argument *before* the function
17009 prologue, e.g. `%o0' on a sparc. The addition must preserve the
17010 values of all other incoming arguments.
17167fd8
MM
17011
17012 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
17013 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
17014 not touch the return address. Hence returning from FUNCTION will
17015 return to whoever called the current `thunk'.
17167fd8 17016
a4f6c312
SS
17017 The effect must be as if FUNCTION had been called directly with the
17018 adjusted first argument. This macro is responsible for emitting
17019 all of the code for a thunk function; output_function_prologue()
17020 and output_function_epilogue() are not invoked.
17167fd8 17021
a4f6c312
SS
17022 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
17023 been extracted from it.) It might possibly be useful on some
17024 targets, but probably not.
17167fd8 17025
a4f6c312
SS
17026 If you do not define this macro, the target-independent code in the
17027 C++ frontend will generate a less efficient heavyweight thunk that
17028 calls FUNCTION instead of jumping to it. The generic approach does
17029 not support varargs. */
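/* A rough illustration (32-bit, small DELTA, no VCALL_OFFSET): the code
   emitted below amounts to

	addi 3,3,DELTA
	b    FUNCTION

   i.e. bump the incoming `this' pointer in r3 and tail-jump to the
   target function. */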
17167fd8 17030
3961e8fe 17031static void
f676971a
EC
17032rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
17033 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 17034 tree function)
17167fd8 17035{
5b71a4e7 17036 rtx this, insn, funexp;
17167fd8 17037
5b71a4e7 17038 reload_completed = 1;
fe3ad572 17039 epilogue_completed = 1;
56a7189a 17040
5b71a4e7 17041 /* Mark the end of the (empty) prologue. */
2e040219 17042 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 17043
5b71a4e7
DE
17044 /* Find the "this" pointer. If the function returns a structure,
17045 the structure return pointer is in r3. */
61f71b34 17046 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 17047 this = gen_rtx_REG (Pmode, 4);
56a7189a 17048 else
5b71a4e7 17049 this = gen_rtx_REG (Pmode, 3);
17167fd8 17050
5b71a4e7
DE
17051 /* Apply the constant offset, if required. */
17052 if (delta)
17053 {
17054 rtx delta_rtx = GEN_INT (delta);
17055 emit_insn (TARGET_32BIT
17056 ? gen_addsi3 (this, this, delta_rtx)
17057 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
17058 }
17059
5b71a4e7
DE
17060 /* Apply the offset from the vtable, if required. */
17061 if (vcall_offset)
17167fd8 17062 {
5b71a4e7
DE
17063 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
17064 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 17065
5b71a4e7 17066 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
17067 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
17068 {
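 /* The vcall offset does not fit in a signed 16-bit displacement, so
    add it into TMP first and then load indirectly through TMP. */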
17069 emit_insn (TARGET_32BIT
17070 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
17071 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
17072 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
17073 }
17074 else
17075 {
17076 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
17077
17078 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
17079 }
5b71a4e7
DE
17080 emit_insn (TARGET_32BIT
17081 ? gen_addsi3 (this, this, tmp)
17082 : gen_adddi3 (this, this, tmp));
17167fd8
MM
17083 }
17084
5b71a4e7
DE
17085 /* Generate a tail call to the target function. */
17086 if (!TREE_USED (function))
17087 {
17088 assemble_external (function);
17089 TREE_USED (function) = 1;
17090 }
17091 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 17092 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
17093
17094#if TARGET_MACHO
ab82a49f 17095 if (MACHOPIC_INDIRECT)
5b71a4e7 17096 funexp = machopic_indirect_call_target (funexp);
ee890fe2 17097#endif
5b71a4e7
DE
17098
17099 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 17100 generate sibcall RTL explicitly. */
5b71a4e7
DE
17101 insn = emit_call_insn (
17102 gen_rtx_PARALLEL (VOIDmode,
17103 gen_rtvec (4,
17104 gen_rtx_CALL (VOIDmode,
17105 funexp, const0_rtx),
17106 gen_rtx_USE (VOIDmode, const0_rtx),
17107 gen_rtx_USE (VOIDmode,
17108 gen_rtx_REG (SImode,
1de43f85 17109 LR_REGNO)),
5b71a4e7
DE
17110 gen_rtx_RETURN (VOIDmode))));
17111 SIBLING_CALL_P (insn) = 1;
17112 emit_barrier ();
17113
17114 /* Run just enough of rest_of_compilation to get the insns emitted.
17115 There's not really enough bulk here to make other passes such as
17116 instruction scheduling worth while. Note that use_thunk calls
17117 assemble_start_function and assemble_end_function. */
17118 insn = get_insns ();
55e092c4 17119 insn_locators_alloc ();
5b71a4e7
DE
17120 shorten_branches (insn);
17121 final_start_function (insn, file, 1);
c9d691e9 17122 final (insn, file, 1);
5b71a4e7
DE
17123 final_end_function ();
17124
17125 reload_completed = 0;
fe3ad572 17126 epilogue_completed = 0;
9ebbca7d 17127}
9ebbca7d
GK
17128\f
17129/* A quick summary of the various types of 'constant-pool tables'
17130 under PowerPC:
17131
17132 Target       Flags                Name             One table per
17133 AIX          (none)               AIX TOC          object file
17134 AIX          -mfull-toc           AIX TOC          object file
17135 AIX          -mminimal-toc        AIX minimal TOC  translation unit
17136 SVR4/EABI    (none)               SVR4 SDATA       object file
17137 SVR4/EABI    -fpic                SVR4 pic         object file
17138 SVR4/EABI    -fPIC                SVR4 PIC         translation unit
17139 SVR4/EABI    -mrelocatable        EABI TOC         function
17140 SVR4/EABI    -maix                AIX TOC          object file
17141 SVR4/EABI    -maix -mminimal-toc
17142                                   AIX minimal TOC  translation unit
17143
17144 Name              Reg.   Set by   entries   contains:
17145                                   made by   addrs?     fp?      sum?
17146
17147 AIX TOC           2      crt0     as        Y          option   option
17148 AIX minimal TOC   30     prolog   gcc       Y          Y        option
17149 SVR4 SDATA        13     crt0     gcc       N          Y        N
17150 SVR4 pic          30     prolog   ld        Y          not yet  N
17151 SVR4 PIC          30     prolog   gcc       Y          option   option
17152 EABI TOC          30     prolog   gcc       Y          option   option
17153
17154*/
17155
9ebbca7d
GK
17156/* Hash functions for the hash table. */
17157
17158static unsigned
a2369ed3 17159rs6000_hash_constant (rtx k)
9ebbca7d 17160{
46b33600
RH
17161 enum rtx_code code = GET_CODE (k);
17162 enum machine_mode mode = GET_MODE (k);
17163 unsigned result = (code << 3) ^ mode;
17164 const char *format;
17165 int flen, fidx;
f676971a 17166
46b33600
RH
17167 format = GET_RTX_FORMAT (code);
17168 flen = strlen (format);
17169 fidx = 0;
9ebbca7d 17170
46b33600
RH
17171 switch (code)
17172 {
17173 case LABEL_REF:
17174 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
17175
17176 case CONST_DOUBLE:
17177 if (mode != VOIDmode)
17178 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
17179 flen = 2;
17180 break;
17181
17182 case CODE_LABEL:
17183 fidx = 3;
17184 break;
17185
17186 default:
17187 break;
17188 }
9ebbca7d
GK
17189
17190 for (; fidx < flen; fidx++)
17191 switch (format[fidx])
17192 {
17193 case 's':
17194 {
17195 unsigned i, len;
17196 const char *str = XSTR (k, fidx);
17197 len = strlen (str);
17198 result = result * 613 + len;
17199 for (i = 0; i < len; i++)
17200 result = result * 613 + (unsigned) str[i];
17167fd8
MM
17201 break;
17202 }
9ebbca7d
GK
17203 case 'u':
17204 case 'e':
17205 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
17206 break;
17207 case 'i':
17208 case 'n':
17209 result = result * 613 + (unsigned) XINT (k, fidx);
17210 break;
17211 case 'w':
17212 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
17213 result = result * 613 + (unsigned) XWINT (k, fidx);
17214 else
17215 {
17216 size_t i;
9390387d 17217 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
17218 result = result * 613 + (unsigned) (XWINT (k, fidx)
17219 >> CHAR_BIT * i);
17220 }
17221 break;
09501938
DE
17222 case '0':
17223 break;
9ebbca7d 17224 default:
37409796 17225 gcc_unreachable ();
9ebbca7d 17226 }
46b33600 17227
9ebbca7d
GK
17228 return result;
17229}
17230
17231static unsigned
a2369ed3 17232toc_hash_function (const void *hash_entry)
9ebbca7d 17233{
f676971a 17234 const struct toc_hash_struct *thc =
a9098fd0
GK
17235 (const struct toc_hash_struct *) hash_entry;
17236 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
17237}
17238
17239/* Compare H1 and H2 for equivalence. */
17240
17241static int
a2369ed3 17242toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
17243{
17244 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
17245 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
17246
a9098fd0
GK
17247 if (((const struct toc_hash_struct *) h1)->key_mode
17248 != ((const struct toc_hash_struct *) h2)->key_mode)
17249 return 0;
17250
5692c7bc 17251 return rtx_equal_p (r1, r2);
9ebbca7d
GK
17252}
17253
28e510bd
MM
17254/* These are the names given by the C++ front-end to vtables, and
17255 vtable-like objects. Ideally, this logic should not be here;
17256 instead, there should be some programmatic way of inquiring as
17257 to whether or not an object is a vtable. */
17258
17259#define VTABLE_NAME_P(NAME) \
9390387d 17260 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
17261 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
17262 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 17263 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 17264 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
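/* These prefixes cover, e.g., _ZTV4Base (vtable), _ZTT4Base (VTT),
   _ZTI4Base (typeinfo) and construction vtables (_ZTC...) in the
   Itanium C++ ABI mangling, plus the old-style _vt. names. */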
28e510bd
MM
17265
17266void
a2369ed3 17267rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
17268{
17269 /* Currently C++ toc references to vtables can be emitted before it
17270 is decided whether the vtable is public or private. If this is
17271 the case, then the linker will eventually complain that there is
f676971a 17272 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
17273 we emit the TOC reference to reference the symbol and not the
17274 section. */
17275 const char *name = XSTR (x, 0);
54ee9799 17276
f676971a 17277 if (VTABLE_NAME_P (name))
54ee9799
DE
17278 {
17279 RS6000_OUTPUT_BASENAME (file, name);
17280 }
17281 else
17282 assemble_name (file, name);
28e510bd
MM
17283}
17284
a4f6c312
SS
17285/* Output a TOC entry. We derive the entry name from what is being
17286 written. */
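/* As a rough illustration only: on 32-bit AIX with a normal TOC, the
   double constant 1.0 produces something like

	.tc FD_3ff00000_0[TC],0x3ff00000,0x0

   after its internal LC label, while with -mminimal-toc only the raw
   data words are emitted. */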
9878760c
RK
17287
17288void
a2369ed3 17289output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
17290{
17291 char buf[256];
3cce094d 17292 const char *name = buf;
ec940faa 17293 const char *real_name;
9878760c 17294 rtx base = x;
16fdeb48 17295 HOST_WIDE_INT offset = 0;
9878760c 17296
37409796 17297 gcc_assert (!TARGET_NO_TOC);
4697a36c 17298
9ebbca7d
GK
17299 /* When the linker won't eliminate them, don't output duplicate
17300 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
17301 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
17302 CODE_LABELs. */
17303 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
17304 {
17305 struct toc_hash_struct *h;
17306 void * * found;
f676971a 17307
17211ab5 17308 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 17309 time because GGC is not initialized at that point. */
17211ab5 17310 if (toc_hash_table == NULL)
f676971a 17311 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
17312 toc_hash_eq, NULL);
17313
9ebbca7d
GK
17314 h = ggc_alloc (sizeof (*h));
17315 h->key = x;
a9098fd0 17316 h->key_mode = mode;
9ebbca7d 17317 h->labelno = labelno;
f676971a 17318
9ebbca7d
GK
17319 found = htab_find_slot (toc_hash_table, h, 1);
17320 if (*found == NULL)
17321 *found = h;
f676971a 17322 else /* This is indeed a duplicate.
9ebbca7d
GK
17323 Set this label equal to that label. */
17324 {
17325 fputs ("\t.set ", file);
17326 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
17327 fprintf (file, "%d,", labelno);
17328 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 17329 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
17330 found)->labelno));
17331 return;
17332 }
17333 }
17334
17335 /* If we're going to put a double constant in the TOC, make sure it's
17336 aligned properly when strict alignment is on. */
ff1720ed
RK
17337 if (GET_CODE (x) == CONST_DOUBLE
17338 && STRICT_ALIGNMENT
a9098fd0 17339 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
17340 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
17341 ASM_OUTPUT_ALIGN (file, 3);
17342 }
17343
4977bab6 17344 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 17345
37c37a57
RK
17346 /* Handle FP constants specially. Note that if we have a minimal
17347 TOC, things we put here aren't actually in the TOC, so we can allow
17348 FP constants. */
00b79d54
BE
17349 if (GET_CODE (x) == CONST_DOUBLE &&
17350 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
17351 {
17352 REAL_VALUE_TYPE rv;
17353 long k[4];
17354
17355 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17356 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17357 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
17358 else
17359 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
17360
17361 if (TARGET_64BIT)
17362 {
17363 if (TARGET_MINIMAL_TOC)
17364 fputs (DOUBLE_INT_ASM_OP, file);
17365 else
17366 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17367 k[0] & 0xffffffff, k[1] & 0xffffffff,
17368 k[2] & 0xffffffff, k[3] & 0xffffffff);
17369 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
17370 k[0] & 0xffffffff, k[1] & 0xffffffff,
17371 k[2] & 0xffffffff, k[3] & 0xffffffff);
17372 return;
17373 }
17374 else
17375 {
17376 if (TARGET_MINIMAL_TOC)
17377 fputs ("\t.long ", file);
17378 else
17379 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17380 k[0] & 0xffffffff, k[1] & 0xffffffff,
17381 k[2] & 0xffffffff, k[3] & 0xffffffff);
17382 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
17383 k[0] & 0xffffffff, k[1] & 0xffffffff,
17384 k[2] & 0xffffffff, k[3] & 0xffffffff);
17385 return;
17386 }
17387 }
00b79d54
BE
17388 else if (GET_CODE (x) == CONST_DOUBLE &&
17389 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 17390 {
042259f2
DE
17391 REAL_VALUE_TYPE rv;
17392 long k[2];
0adc764e 17393
042259f2 17394 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17395
17396 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17397 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
17398 else
17399 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 17400
13ded975
DE
17401 if (TARGET_64BIT)
17402 {
17403 if (TARGET_MINIMAL_TOC)
2bfcf297 17404 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17405 else
2f0552b6
AM
17406 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17407 k[0] & 0xffffffff, k[1] & 0xffffffff);
17408 fprintf (file, "0x%lx%08lx\n",
17409 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17410 return;
17411 }
1875cc88 17412 else
13ded975
DE
17413 {
17414 if (TARGET_MINIMAL_TOC)
2bfcf297 17415 fputs ("\t.long ", file);
13ded975 17416 else
2f0552b6
AM
17417 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17418 k[0] & 0xffffffff, k[1] & 0xffffffff);
17419 fprintf (file, "0x%lx,0x%lx\n",
17420 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17421 return;
17422 }
9878760c 17423 }
00b79d54
BE
17424 else if (GET_CODE (x) == CONST_DOUBLE &&
17425 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 17426 {
042259f2
DE
17427 REAL_VALUE_TYPE rv;
17428 long l;
9878760c 17429
042259f2 17430 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17431 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17432 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
17433 else
17434 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 17435
31bfaa0b
DE
17436 if (TARGET_64BIT)
17437 {
17438 if (TARGET_MINIMAL_TOC)
2bfcf297 17439 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 17440 else
2f0552b6
AM
17441 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17442 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
17443 return;
17444 }
042259f2 17445 else
31bfaa0b
DE
17446 {
17447 if (TARGET_MINIMAL_TOC)
2bfcf297 17448 fputs ("\t.long ", file);
31bfaa0b 17449 else
2f0552b6
AM
17450 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17451 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
17452 return;
17453 }
042259f2 17454 }
f176e826 17455 else if (GET_MODE (x) == VOIDmode
a9098fd0 17456 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 17457 {
e2c953b6 17458 unsigned HOST_WIDE_INT low;
042259f2
DE
17459 HOST_WIDE_INT high;
17460
17461 if (GET_CODE (x) == CONST_DOUBLE)
17462 {
17463 low = CONST_DOUBLE_LOW (x);
17464 high = CONST_DOUBLE_HIGH (x);
17465 }
17466 else
17467#if HOST_BITS_PER_WIDE_INT == 32
17468 {
17469 low = INTVAL (x);
0858c623 17470 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
17471 }
17472#else
17473 {
c4ad648e
AM
17474 low = INTVAL (x) & 0xffffffff;
17475 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
17476 }
17477#endif
9878760c 17478
a9098fd0
GK
17479 /* TOC entries are always Pmode-sized, but since this
17480 is a big-endian machine, if we're putting smaller
17481 integer constants in the TOC we have to pad them.
17482 (This is still a win over putting the constants in
17483 a separate constant pool, because then we'd have
02a4ec28
FS
17484 to have both a TOC entry _and_ the actual constant.)
17485
17486 For a 32-bit target, CONST_INT values are loaded and shifted
17487 entirely within `low' and can be stored in one TOC entry. */
17488
37409796
NS
17489 /* It would be easy to make this work, but it doesn't now. */
17490 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
17491
17492 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
17493 {
17494#if HOST_BITS_PER_WIDE_INT == 32
17495 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
17496 POINTER_SIZE, &low, &high, 0);
17497#else
17498 low |= high << 32;
17499 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
17500 high = (HOST_WIDE_INT) low >> 32;
17501 low &= 0xffffffff;
17502#endif
17503 }
a9098fd0 17504
13ded975
DE
17505 if (TARGET_64BIT)
17506 {
17507 if (TARGET_MINIMAL_TOC)
2bfcf297 17508 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17509 else
2f0552b6
AM
17510 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
17511 (long) high & 0xffffffff, (long) low & 0xffffffff);
17512 fprintf (file, "0x%lx%08lx\n",
17513 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
17514 return;
17515 }
1875cc88 17516 else
13ded975 17517 {
02a4ec28
FS
17518 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
17519 {
17520 if (TARGET_MINIMAL_TOC)
2bfcf297 17521 fputs ("\t.long ", file);
02a4ec28 17522 else
2bfcf297 17523 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
17524 (long) high & 0xffffffff, (long) low & 0xffffffff);
17525 fprintf (file, "0x%lx,0x%lx\n",
17526 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 17527 }
13ded975 17528 else
02a4ec28
FS
17529 {
17530 if (TARGET_MINIMAL_TOC)
2bfcf297 17531 fputs ("\t.long ", file);
02a4ec28 17532 else
2f0552b6
AM
17533 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
17534 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 17535 }
13ded975
DE
17536 return;
17537 }
9878760c
RK
17538 }
17539
17540 if (GET_CODE (x) == CONST)
17541 {
37409796 17542 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 17543
9878760c
RK
17544 base = XEXP (XEXP (x, 0), 0);
17545 offset = INTVAL (XEXP (XEXP (x, 0), 1));
17546 }
f676971a 17547
37409796
NS
17548 switch (GET_CODE (base))
17549 {
17550 case SYMBOL_REF:
17551 name = XSTR (base, 0);
17552 break;
17553
17554 case LABEL_REF:
17555 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
17556 CODE_LABEL_NUMBER (XEXP (base, 0)));
17557 break;
17558
17559 case CODE_LABEL:
17560 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
17561 break;
17562
17563 default:
17564 gcc_unreachable ();
17565 }
9878760c 17566
772c5265 17567 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 17568 if (TARGET_MINIMAL_TOC)
2bfcf297 17569 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
17570 else
17571 {
b6c9286a 17572 fprintf (file, "\t.tc %s", real_name);
9878760c 17573
1875cc88 17574 if (offset < 0)
16fdeb48 17575 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 17576 else if (offset)
16fdeb48 17577 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 17578
19d2d16f 17579 fputs ("[TC],", file);
1875cc88 17580 }
581bc4de
MM
17581
17582 /* Currently C++ toc references to vtables can be emitted before it
17583 is decided whether the vtable is public or private. If this is
17584 the case, then the linker will eventually complain that there is
17585 a TOC reference to an unknown section. Thus, for vtables only,
17586 we emit the TOC reference to reference the symbol and not the
17587 section. */
28e510bd 17588 if (VTABLE_NAME_P (name))
581bc4de 17589 {
54ee9799 17590 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 17591 if (offset < 0)
16fdeb48 17592 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 17593 else if (offset > 0)
16fdeb48 17594 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
17595 }
17596 else
17597 output_addr_const (file, x);
19d2d16f 17598 putc ('\n', file);
9878760c
RK
17599}
17600\f
17601/* Output an assembler pseudo-op to write an ASCII string of N characters
17602 starting at P to FILE.
17603
17604 On the RS/6000, we have to do this using the .byte operation and
17605 write out special characters outside the quoted string.
17606 Also, the assembler is broken; very long strings are truncated,
a4f6c312 17607 so we must artificially break them up early. */
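/* For instance, the three bytes "Hi\n" come out as

	.byte "Hi"
	.byte 10

   printable characters are collected into quoted runs and everything
   else is written as a decimal byte value. */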
9878760c
RK
17608
17609void
a2369ed3 17610output_ascii (FILE *file, const char *p, int n)
9878760c
RK
17611{
17612 char c;
17613 int i, count_string;
d330fd93
KG
17614 const char *for_string = "\t.byte \"";
17615 const char *for_decimal = "\t.byte ";
17616 const char *to_close = NULL;
9878760c
RK
17617
17618 count_string = 0;
17619 for (i = 0; i < n; i++)
17620 {
17621 c = *p++;
17622 if (c >= ' ' && c < 0177)
17623 {
17624 if (for_string)
17625 fputs (for_string, file);
17626 putc (c, file);
17627
17628 /* Write two quotes to get one. */
17629 if (c == '"')
17630 {
17631 putc (c, file);
17632 ++count_string;
17633 }
17634
17635 for_string = NULL;
17636 for_decimal = "\"\n\t.byte ";
17637 to_close = "\"\n";
17638 ++count_string;
17639
17640 if (count_string >= 512)
17641 {
17642 fputs (to_close, file);
17643
17644 for_string = "\t.byte \"";
17645 for_decimal = "\t.byte ";
17646 to_close = NULL;
17647 count_string = 0;
17648 }
17649 }
17650 else
17651 {
17652 if (for_decimal)
17653 fputs (for_decimal, file);
17654 fprintf (file, "%d", c);
17655
17656 for_string = "\n\t.byte \"";
17657 for_decimal = ", ";
17658 to_close = "\n";
17659 count_string = 0;
17660 }
17661 }
17662
17663 /* Now close the string if we have written one. Then end the line. */
17664 if (to_close)
9ebbca7d 17665 fputs (to_close, file);
9878760c
RK
17666}
17667\f
17668/* Generate a unique section name for FILENAME for a section type
17669 represented by SECTION_DESC. Output goes into BUF.
17670
17671 SECTION_DESC can be any string, as long as it is different for each
17672 possible section type.
17673
17674 We name the section in the same manner as xlc. The name begins with an
17675 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
17676 names) with the last period replaced by the string SECTION_DESC. If
17677 FILENAME does not contain a period, SECTION_DESC is appended to the end of
17678 the name. */
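/* For example, FILENAME "dir/foo.c" with SECTION_DESC ".ro_" (purely
   illustrative values) yields "_foo.ro_", while a FILENAME without a
   period, such as "bar", yields "_bar.ro_". */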
9878760c
RK
17679
17680void
f676971a 17681rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 17682 const char *section_desc)
9878760c 17683{
9ebbca7d 17684 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
17685 char *p;
17686 int len;
9878760c
RK
17687
17688 after_last_slash = filename;
17689 for (q = filename; *q; q++)
11e5fe42
RK
17690 {
17691 if (*q == '/')
17692 after_last_slash = q + 1;
17693 else if (*q == '.')
17694 last_period = q;
17695 }
9878760c 17696
11e5fe42 17697 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 17698 *buf = (char *) xmalloc (len);
9878760c
RK
17699
17700 p = *buf;
17701 *p++ = '_';
17702
17703 for (q = after_last_slash; *q; q++)
17704 {
11e5fe42 17705 if (q == last_period)
c4ad648e 17706 {
9878760c
RK
17707 strcpy (p, section_desc);
17708 p += strlen (section_desc);
e3981aab 17709 break;
c4ad648e 17710 }
9878760c 17711
e9a780ec 17712 else if (ISALNUM (*q))
c4ad648e 17713 *p++ = *q;
9878760c
RK
17714 }
17715
11e5fe42 17716 if (last_period == 0)
9878760c
RK
17717 strcpy (p, section_desc);
17718 else
17719 *p = '\0';
17720}
e165f3f0 17721\f
a4f6c312 17722/* Emit profile function. */
411707f4 17723
411707f4 17724void
a2369ed3 17725output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 17726{
858081ad
AH
17727 /* Non-standard profiling for kernels, which just saves LR then calls
17728 _mcount without worrying about arg saves. The idea is to change
17729 the function prologue as little as possible as it isn't easy to
17730 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
17731 if (TARGET_PROFILE_KERNEL)
17732 return;
17733
8480e480
CC
17734 if (DEFAULT_ABI == ABI_AIX)
17735 {
9739c90c
JJ
17736#ifndef NO_PROFILE_COUNTERS
17737# define NO_PROFILE_COUNTERS 0
17738#endif
f676971a 17739 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
17740 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
17741 else
17742 {
17743 char buf[30];
17744 const char *label_name;
17745 rtx fun;
411707f4 17746
9739c90c
JJ
17747 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
17748 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
17749 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 17750
9739c90c
JJ
17751 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
17752 fun, Pmode);
17753 }
8480e480 17754 }
ee890fe2
SS
17755 else if (DEFAULT_ABI == ABI_DARWIN)
17756 {
d5fa86ba 17757 const char *mcount_name = RS6000_MCOUNT;
1de43f85 17758 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
17759
17760 /* Be conservative and always set this, at least for now. */
17761 current_function_uses_pic_offset_table = 1;
17762
17763#if TARGET_MACHO
17764 /* For PIC code, set up a stub and collect the caller's address
17765 from r0, which is where the prologue puts it. */
11abc112
MM
17766 if (MACHOPIC_INDIRECT
17767 && current_function_uses_pic_offset_table)
17768 caller_addr_regno = 0;
ee890fe2
SS
17769#endif
17770 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
17771 0, VOIDmode, 1,
17772 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
17773 }
411707f4
CC
17774}
17775
a4f6c312 17776/* Write function profiler code. */
e165f3f0
RK
17777
17778void
a2369ed3 17779output_function_profiler (FILE *file, int labelno)
e165f3f0 17780{
3daf36a4 17781 char buf[100];
e165f3f0 17782
38c1f2d7 17783 switch (DEFAULT_ABI)
3daf36a4 17784 {
38c1f2d7 17785 default:
37409796 17786 gcc_unreachable ();
38c1f2d7
MM
17787
17788 case ABI_V4:
09eeeacb
AM
17789 if (!TARGET_32BIT)
17790 {
d4ee4d25 17791 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
17792 return;
17793 }
ffcfcb5f 17794 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 17795 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
17796 if (NO_PROFILE_COUNTERS)
17797 {
17798 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17799 reg_names[0], reg_names[1]);
17800 }
17801 else if (TARGET_SECURE_PLT && flag_pic)
17802 {
17803 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
17804 reg_names[0], reg_names[1]);
17805 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
17806 asm_fprintf (file, "\t{cau|addis} %s,%s,",
17807 reg_names[12], reg_names[12]);
17808 assemble_name (file, buf);
17809 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
17810 assemble_name (file, buf);
17811 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
17812 }
17813 else if (flag_pic == 1)
38c1f2d7 17814 {
dfdfa60f 17815 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
17816 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17817 reg_names[0], reg_names[1]);
17167fd8 17818 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 17819 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 17820 assemble_name (file, buf);
17167fd8 17821 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 17822 }
9ebbca7d 17823 else if (flag_pic > 1)
38c1f2d7 17824 {
71625f3d
AM
17825 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17826 reg_names[0], reg_names[1]);
9ebbca7d 17827 /* Now, we need to get the address of the label. */
71625f3d 17828 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 17829 assemble_name (file, buf);
9ebbca7d
GK
17830 fputs ("-.\n1:", file);
17831 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 17832 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
17833 reg_names[0], reg_names[11]);
17834 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
17835 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 17836 }
38c1f2d7
MM
17837 else
17838 {
17167fd8 17839 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 17840 assemble_name (file, buf);
dfdfa60f 17841 fputs ("@ha\n", file);
71625f3d
AM
17842 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17843 reg_names[0], reg_names[1]);
a260abc9 17844 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 17845 assemble_name (file, buf);
17167fd8 17846 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
17847 }
17848
50d440bc 17849 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
17850 fprintf (file, "\tbl %s%s\n",
17851 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
17852 break;
17853
17854 case ABI_AIX:
ee890fe2 17855 case ABI_DARWIN:
ffcfcb5f
AM
17856 if (!TARGET_PROFILE_KERNEL)
17857 {
a3c9585f 17858 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
17859 }
17860 else
17861 {
37409796 17862 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
17863
17864 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
17865 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
17866
6de9cd9a 17867 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
17868 {
17869 asm_fprintf (file, "\tstd %s,24(%s)\n",
17870 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17871 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17872 asm_fprintf (file, "\tld %s,24(%s)\n",
17873 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17874 }
17875 else
17876 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17877 }
38c1f2d7
MM
17878 break;
17879 }
e165f3f0 17880}
a251ffd0 17881
b54cf83a 17882\f
44cd321e
PS
17883
17884/* The following variable value is the last issued insn. */
17885
17886static rtx last_scheduled_insn;
17887
17888/* The following variable helps to balance the issuing of load and
17889 store instructions. */
17890
17891static int load_store_pendulum;
17892
b54cf83a
DE
17893/* Power4 load update and store update instructions are cracked into a
17894 load or store and an integer insn which are executed in the same cycle.
17895 Branches have their own dispatch slot which does not count against the
17896 GCC issue rate, but it changes the program flow so there are no other
17897 instructions to issue in this cycle. */
17898
17899static int
f676971a
EC
17900rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
17901 int verbose ATTRIBUTE_UNUSED,
a2369ed3 17902 rtx insn, int more)
b54cf83a 17903{
44cd321e 17904 last_scheduled_insn = insn;
b54cf83a
DE
17905 if (GET_CODE (PATTERN (insn)) == USE
17906 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
17907 {
17908 cached_can_issue_more = more;
17909 return cached_can_issue_more;
17910 }
17911
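 /* An insn that ends a dispatch group uses up the remaining issue
    slots for this cycle. */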
17912 if (insn_terminates_group_p (insn, current_group))
17913 {
17914 cached_can_issue_more = 0;
17915 return cached_can_issue_more;
17916 }
b54cf83a 17917
d296e02e
AP
17918 /* If the insn has no reservation but we reach here anyway, don't charge it against the issue rate. */
17919 if (recog_memoized (insn) < 0)
17920 return more;
17921
ec507f2d 17922 if (rs6000_sched_groups)
b54cf83a 17923 {
cbe26ab8 17924 if (is_microcoded_insn (insn))
44cd321e 17925 cached_can_issue_more = 0;
cbe26ab8 17926 else if (is_cracked_insn (insn))
44cd321e
PS
17927 cached_can_issue_more = more > 2 ? more - 2 : 0;
17928 else
17929 cached_can_issue_more = more - 1;
17930
17931 return cached_can_issue_more;
b54cf83a 17932 }
165b263e 17933
d296e02e
AP
17934 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
17935 return 0;
17936
44cd321e
PS
17937 cached_can_issue_more = more - 1;
17938 return cached_can_issue_more;
b54cf83a
DE
17939}
17940
a251ffd0
TG
17941/* Adjust the cost of a scheduling dependency. Return the new cost of
17942 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
17943
c237e94a 17944static int
0a4f0294 17945rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 17946{
44cd321e 17947 enum attr_type attr_type;
a251ffd0 17948
44cd321e 17949 if (! recog_memoized (insn))
a251ffd0
TG
17950 return 0;
17951
44cd321e 17952 switch (REG_NOTE_KIND (link))
a251ffd0 17953 {
44cd321e
PS
17954 case REG_DEP_TRUE:
17955 {
17956 /* Data dependency; DEP_INSN writes a register that INSN reads
17957 some cycles later. */
17958
17959 /* Separate a load from a narrower, dependent store. */
17960 if (rs6000_sched_groups
17961 && GET_CODE (PATTERN (insn)) == SET
17962 && GET_CODE (PATTERN (dep_insn)) == SET
17963 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
17964 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
17965 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
17966 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
17967 return cost + 14;
17968
17969 attr_type = get_attr_type (insn);
17970
17971 switch (attr_type)
17972 {
17973 case TYPE_JMPREG:
17974 /* Tell the first scheduling pass about the latency between
17975 a mtctr and bctr (and mtlr and br/blr). The first
17976 scheduling pass will not know about this latency since
17977 the mtctr instruction, which has the latency associated
17978 to it, will be generated by reload. */
17979 return TARGET_POWER ? 5 : 4;
17980 case TYPE_BRANCH:
17981 /* Leave some extra cycles between a compare and its
17982 dependent branch, to inhibit expensive mispredicts. */
17983 if ((rs6000_cpu_attr == CPU_PPC603
17984 || rs6000_cpu_attr == CPU_PPC604
17985 || rs6000_cpu_attr == CPU_PPC604E
17986 || rs6000_cpu_attr == CPU_PPC620
17987 || rs6000_cpu_attr == CPU_PPC630
17988 || rs6000_cpu_attr == CPU_PPC750
17989 || rs6000_cpu_attr == CPU_PPC7400
17990 || rs6000_cpu_attr == CPU_PPC7450
17991 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
17992 || rs6000_cpu_attr == CPU_POWER5
17993 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
17994 && recog_memoized (dep_insn)
17995 && (INSN_CODE (dep_insn) >= 0))
982afe02 17996
44cd321e
PS
17997 switch (get_attr_type (dep_insn))
17998 {
17999 case TYPE_CMP:
18000 case TYPE_COMPARE:
18001 case TYPE_DELAYED_COMPARE:
18002 case TYPE_IMUL_COMPARE:
18003 case TYPE_LMUL_COMPARE:
18004 case TYPE_FPCOMPARE:
18005 case TYPE_CR_LOGICAL:
18006 case TYPE_DELAYED_CR:
18007 return cost + 2;
18008 default:
18009 break;
18010 }
18011 break;
18012
18013 case TYPE_STORE:
18014 case TYPE_STORE_U:
18015 case TYPE_STORE_UX:
18016 case TYPE_FPSTORE:
18017 case TYPE_FPSTORE_U:
18018 case TYPE_FPSTORE_UX:
18019 if ((rs6000_cpu == PROCESSOR_POWER6)
18020 && recog_memoized (dep_insn)
18021 && (INSN_CODE (dep_insn) >= 0))
18022 {
18023
18024 if (GET_CODE (PATTERN (insn)) != SET)
18025 /* If this happens, we have to extend this to schedule
18026 optimally. Return default for now. */
18027 return cost;
18028
18029 /* Adjust the cost for the case where the value written
18030 by a fixed point operation is used as the address
18031 gen value on a store. */
18032 switch (get_attr_type (dep_insn))
18033 {
18034 case TYPE_LOAD:
18035 case TYPE_LOAD_U:
18036 case TYPE_LOAD_UX:
18037 case TYPE_CNTLZ:
18038 {
18039 if (! store_data_bypass_p (dep_insn, insn))
18040 return 4;
18041 break;
18042 }
18043 case TYPE_LOAD_EXT:
18044 case TYPE_LOAD_EXT_U:
18045 case TYPE_LOAD_EXT_UX:
18046 case TYPE_VAR_SHIFT_ROTATE:
18047 case TYPE_VAR_DELAYED_COMPARE:
18048 {
18049 if (! store_data_bypass_p (dep_insn, insn))
18050 return 6;
18051 break;
18052 }
18053 case TYPE_INTEGER:
18054 case TYPE_COMPARE:
18055 case TYPE_FAST_COMPARE:
18056 case TYPE_EXTS:
18057 case TYPE_SHIFT:
18058 case TYPE_INSERT_WORD:
18059 case TYPE_INSERT_DWORD:
18060 case TYPE_FPLOAD_U:
18061 case TYPE_FPLOAD_UX:
18062 case TYPE_STORE_U:
18063 case TYPE_STORE_UX:
18064 case TYPE_FPSTORE_U:
18065 case TYPE_FPSTORE_UX:
18066 {
18067 if (! store_data_bypass_p (dep_insn, insn))
18068 return 3;
18069 break;
18070 }
18071 case TYPE_IMUL:
18072 case TYPE_IMUL2:
18073 case TYPE_IMUL3:
18074 case TYPE_LMUL:
18075 case TYPE_IMUL_COMPARE:
18076 case TYPE_LMUL_COMPARE:
18077 {
18078 if (! store_data_bypass_p (dep_insn, insn))
18079 return 17;
18080 break;
18081 }
18082 case TYPE_IDIV:
18083 {
18084 if (! store_data_bypass_p (dep_insn, insn))
18085 return 45;
18086 break;
18087 }
18088 case TYPE_LDIV:
18089 {
18090 if (! store_data_bypass_p (dep_insn, insn))
18091 return 57;
18092 break;
18093 }
18094 default:
18095 break;
18096 }
18097 }
18098 break;
18099
18100 case TYPE_LOAD:
18101 case TYPE_LOAD_U:
18102 case TYPE_LOAD_UX:
18103 case TYPE_LOAD_EXT:
18104 case TYPE_LOAD_EXT_U:
18105 case TYPE_LOAD_EXT_UX:
18106 if ((rs6000_cpu == PROCESSOR_POWER6)
18107 && recog_memoized (dep_insn)
18108 && (INSN_CODE (dep_insn) >= 0))
18109 {
18110
18111 /* Adjust the cost for the case where the value written
18112 by a fixed point instruction is used within the address
18113 gen portion of a subsequent load(u)(x) */
18114 switch (get_attr_type (dep_insn))
18115 {
18116 case TYPE_LOAD:
18117 case TYPE_LOAD_U:
18118 case TYPE_LOAD_UX:
18119 case TYPE_CNTLZ:
18120 {
18121 if (set_to_load_agen (dep_insn, insn))
18122 return 4;
18123 break;
18124 }
18125 case TYPE_LOAD_EXT:
18126 case TYPE_LOAD_EXT_U:
18127 case TYPE_LOAD_EXT_UX:
18128 case TYPE_VAR_SHIFT_ROTATE:
18129 case TYPE_VAR_DELAYED_COMPARE:
18130 {
18131 if (set_to_load_agen (dep_insn, insn))
18132 return 6;
18133 break;
18134 }
18135 case TYPE_INTEGER:
18136 case TYPE_COMPARE:
18137 case TYPE_FAST_COMPARE:
18138 case TYPE_EXTS:
18139 case TYPE_SHIFT:
18140 case TYPE_INSERT_WORD:
18141 case TYPE_INSERT_DWORD:
18142 case TYPE_FPLOAD_U:
18143 case TYPE_FPLOAD_UX:
18144 case TYPE_STORE_U:
18145 case TYPE_STORE_UX:
18146 case TYPE_FPSTORE_U:
18147 case TYPE_FPSTORE_UX:
18148 {
18149 if (set_to_load_agen (dep_insn, insn))
18150 return 3;
18151 break;
18152 }
18153 case TYPE_IMUL:
18154 case TYPE_IMUL2:
18155 case TYPE_IMUL3:
18156 case TYPE_LMUL:
18157 case TYPE_IMUL_COMPARE:
18158 case TYPE_LMUL_COMPARE:
18159 {
18160 if (set_to_load_agen (dep_insn, insn))
18161 return 17;
18162 break;
18163 }
18164 case TYPE_IDIV:
18165 {
18166 if (set_to_load_agen (dep_insn, insn))
18167 return 45;
18168 break;
18169 }
18170 case TYPE_LDIV:
18171 {
18172 if (set_to_load_agen (dep_insn, insn))
18173 return 57;
18174 break;
18175 }
18176 default:
18177 break;
18178 }
18179 }
18180 break;
18181
18182 case TYPE_FPLOAD:
18183 if ((rs6000_cpu == PROCESSOR_POWER6)
18184 && recog_memoized (dep_insn)
18185 && (INSN_CODE (dep_insn) >= 0)
18186 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
18187 return 2;
18188
18189 default:
18190 break;
18191 }
c9dbf840 18192
a251ffd0 18193 /* Fall out to return default cost. */
44cd321e
PS
18194 }
18195 break;
18196
18197 case REG_DEP_OUTPUT:
18198 /* Output dependency; DEP_INSN writes a register that INSN writes some
18199 cycles later. */
18200 if ((rs6000_cpu == PROCESSOR_POWER6)
18201 && recog_memoized (dep_insn)
18202 && (INSN_CODE (dep_insn) >= 0))
18203 {
18204 attr_type = get_attr_type (insn);
18205
18206 switch (attr_type)
18207 {
18208 case TYPE_FP:
18209 if (get_attr_type (dep_insn) == TYPE_FP)
18210 return 1;
18211 break;
18212 case TYPE_FPLOAD:
18213 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
18214 return 2;
18215 break;
18216 default:
18217 break;
18218 }
18219 }
18220 case REG_DEP_ANTI:
18221 /* Anti dependency; DEP_INSN reads a register that INSN writes some
18222 cycles later. */
18223 return 0;
18224
18225 default:
18226 gcc_unreachable ();
a251ffd0
TG
18227 }
18228
18229 return cost;
18230}
b6c9286a 18231
cbe26ab8 18232/* The function returns true if INSN is microcoded.
839a4992 18233 Return false otherwise. */
cbe26ab8
DN
18234
18235static bool
18236is_microcoded_insn (rtx insn)
18237{
18238 if (!insn || !INSN_P (insn)
18239 || GET_CODE (PATTERN (insn)) == USE
18240 || GET_CODE (PATTERN (insn)) == CLOBBER)
18241 return false;
18242
d296e02e
AP
18243 if (rs6000_cpu_attr == CPU_CELL)
18244 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
18245
ec507f2d 18246 if (rs6000_sched_groups)
cbe26ab8
DN
18247 {
18248 enum attr_type type = get_attr_type (insn);
18249 if (type == TYPE_LOAD_EXT_U
18250 || type == TYPE_LOAD_EXT_UX
18251 || type == TYPE_LOAD_UX
18252 || type == TYPE_STORE_UX
18253 || type == TYPE_MFCR)
c4ad648e 18254 return true;
cbe26ab8
DN
18255 }
18256
18257 return false;
18258}
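/* Descriptive note (ours, not in the original source): on the POWER4/POWER5
   dispatch model the insn types listed above expand into several internal
   operations, which is why insn_must_be_first_in_group and
   insn_must_be_last_in_group below both return true for microcoded insns,
   i.e. such an insn gets a dispatch group to itself.  */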
18259
cbe26ab8
DN
18260/* The function returns true if INSN is cracked into 2 instructions
18261 by the processor (and therefore occupies 2 issue slots). */
18262
18263static bool
18264is_cracked_insn (rtx insn)
18265{
18266 if (!insn || !INSN_P (insn)
18267 || GET_CODE (PATTERN (insn)) == USE
18268 || GET_CODE (PATTERN (insn)) == CLOBBER)
18269 return false;
18270
ec507f2d 18271 if (rs6000_sched_groups)
cbe26ab8
DN
18272 {
18273 enum attr_type type = get_attr_type (insn);
18274 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
18275 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
18276 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
18277 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
18278 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
18279 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
18280 || type == TYPE_IDIV || type == TYPE_LDIV
18281 || type == TYPE_INSERT_WORD)
18282 return true;
cbe26ab8
DN
18283 }
18284
18285 return false;
18286}
18287
18288/* The function returns true if INSN can be issued only from
a3c9585f 18289 the branch slot. */
cbe26ab8
DN
18290
18291static bool
18292is_branch_slot_insn (rtx insn)
18293{
18294 if (!insn || !INSN_P (insn)
18295 || GET_CODE (PATTERN (insn)) == USE
18296 || GET_CODE (PATTERN (insn)) == CLOBBER)
18297 return false;
18298
ec507f2d 18299 if (rs6000_sched_groups)
cbe26ab8
DN
18300 {
18301 enum attr_type type = get_attr_type (insn);
18302 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 18303 return true;
cbe26ab8
DN
18304 return false;
18305 }
18306
18307 return false;
18308}
79ae11c4 18309
44cd321e
PS
 18310/* The function returns true if OUT_INSN sets a value that is
 18311 used in the address generation computation of IN_INSN.  */
18312static bool
18313set_to_load_agen (rtx out_insn, rtx in_insn)
18314{
18315 rtx out_set, in_set;
18316
18317 /* For performance reasons, only handle the simple case where
 18318 both insns are a single_set. */
18319 out_set = single_set (out_insn);
18320 if (out_set)
18321 {
18322 in_set = single_set (in_insn);
18323 if (in_set)
18324 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
18325 }
18326
18327 return false;
18328}
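/* Illustrative example (ours, not in the original source): for the PowerPC
   sequence
       addi r9,r9,16    <- out_insn sets r9
       lwz  r3,0(r9)    <- in_insn reads r9 to form its address
   set_to_load_agen returns true, which is what selects the larger POWER6
   address-generation latencies in the cost adjustment code above.  */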
18329
 18330/* Return true if the target storage location of INSN1 is adjacent
 18331 to the target storage location of INSN2.  */
18333
18334static bool
18335adjacent_mem_locations (rtx insn1, rtx insn2)
18336{
18337
e3a0e200
PB
18338 rtx a = get_store_dest (PATTERN (insn1));
18339 rtx b = get_store_dest (PATTERN (insn2));
18340
44cd321e
PS
18341 if ((GET_CODE (XEXP (a, 0)) == REG
18342 || (GET_CODE (XEXP (a, 0)) == PLUS
18343 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
18344 && (GET_CODE (XEXP (b, 0)) == REG
18345 || (GET_CODE (XEXP (b, 0)) == PLUS
18346 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
18347 {
f98e8938 18348 HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
44cd321e 18349 rtx reg0, reg1;
44cd321e
PS
18350
18351 if (GET_CODE (XEXP (a, 0)) == PLUS)
18352 {
18353 reg0 = XEXP (XEXP (a, 0), 0);
18354 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
18355 }
18356 else
18357 reg0 = XEXP (a, 0);
18358
18359 if (GET_CODE (XEXP (b, 0)) == PLUS)
18360 {
18361 reg1 = XEXP (XEXP (b, 0), 0);
18362 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
18363 }
18364 else
18365 reg1 = XEXP (b, 0);
18366
18367 val_diff = val1 - val0;
18368
18369 return ((REGNO (reg0) == REGNO (reg1))
f98e8938
JJ
18370 && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
18371 || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
44cd321e
PS
18372 }
18373
18374 return false;
18375}
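/* Minimal standalone sketch (ours, for exposition only; the helper below is
   not used by the compiler): the adjacency test above reduces to comparing
   (base register, offset, size) triples, e.g. a 4-byte store to 4(r9)
   followed by a store to 8(r9) is adjacent.  */

static inline int
adjacent_mem_locations_sketch (unsigned int reg0, long off0, long size0,
			       unsigned int reg1, long off1, long size1)
{
  long diff = off1 - off0;

  /* Same base register, and the second access starts exactly where the
     first one ends, in either order.  */
  return reg0 == reg1 && (diff == size0 || diff == -size1);
}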
18376
a4f6c312 18377/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
 18378 priority INSN_PRIORITY (INSN). Increase the priority to execute
 18379 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
18380 define this macro if you do not need to adjust the scheduling
18381 priorities of insns. */
bef84347 18382
c237e94a 18383static int
a2369ed3 18384rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 18385{
a4f6c312
SS
18386 /* On machines (like the 750) which have asymmetric integer units,
18387 where one integer unit can do multiply and divides and the other
18388 can't, reduce the priority of multiply/divide so it is scheduled
18389 before other integer operations. */
bef84347
VM
18390
18391#if 0
2c3c49de 18392 if (! INSN_P (insn))
bef84347
VM
18393 return priority;
18394
18395 if (GET_CODE (PATTERN (insn)) == USE)
18396 return priority;
18397
18398 switch (rs6000_cpu_attr) {
18399 case CPU_PPC750:
18400 switch (get_attr_type (insn))
18401 {
18402 default:
18403 break;
18404
18405 case TYPE_IMUL:
18406 case TYPE_IDIV:
3cb999d8
DE
18407 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
18408 priority, priority);
bef84347
VM
18409 if (priority >= 0 && priority < 0x01000000)
18410 priority >>= 3;
18411 break;
18412 }
18413 }
18414#endif
18415
44cd321e 18416 if (insn_must_be_first_in_group (insn)
79ae11c4 18417 && reload_completed
f676971a 18418 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
18419 && rs6000_sched_restricted_insns_priority)
18420 {
18421
c4ad648e
AM
18422 /* Prioritize insns that can be dispatched only in the first
18423 dispatch slot. */
79ae11c4 18424 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
18425 /* Attach highest priority to insn. This means that in
18426 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 18427 precede 'priority' (critical path) considerations. */
f676971a 18428 return current_sched_info->sched_max_insns_priority;
79ae11c4 18429 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 18430 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
18431 haifa-sched.c:ready_sort(), only 'priority' (critical path)
18432 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
18433 return (priority + 1);
18434 }
79ae11c4 18435
44cd321e
PS
18436 if (rs6000_cpu == PROCESSOR_POWER6
18437 && ((load_store_pendulum == -2 && is_load_insn (insn))
18438 || (load_store_pendulum == 2 && is_store_insn (insn))))
18439 /* Attach highest priority to insn if the scheduler has just issued two
18440 stores and this instruction is a load, or two loads and this instruction
18441 is a store. Power6 wants loads and stores scheduled alternately
18442 when possible */
18443 return current_sched_info->sched_max_insns_priority;
18444
bef84347
VM
18445 return priority;
18446}
18447
d296e02e
AP
18448/* Return true if the instruction is nonpipelined on the Cell. */
18449static bool
18450is_nonpipeline_insn (rtx insn)
18451{
18452 enum attr_type type;
18453 if (!insn || !INSN_P (insn)
18454 || GET_CODE (PATTERN (insn)) == USE
18455 || GET_CODE (PATTERN (insn)) == CLOBBER)
18456 return false;
18457
18458 type = get_attr_type (insn);
18459 if (type == TYPE_IMUL
18460 || type == TYPE_IMUL2
18461 || type == TYPE_IMUL3
18462 || type == TYPE_LMUL
18463 || type == TYPE_IDIV
18464 || type == TYPE_LDIV
18465 || type == TYPE_SDIV
18466 || type == TYPE_DDIV
18467 || type == TYPE_SSQRT
18468 || type == TYPE_DSQRT
18469 || type == TYPE_MFCR
18470 || type == TYPE_MFCRF
18471 || type == TYPE_MFJMPR)
18472 {
18473 return true;
18474 }
18475 return false;
18476}
18477
18478
a4f6c312
SS
18479/* Return how many instructions the machine can issue per cycle. */
18480
c237e94a 18481static int
863d938c 18482rs6000_issue_rate (void)
b6c9286a 18483{
3317bab1
DE
18484 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
18485 if (!reload_completed)
18486 return 1;
18487
b6c9286a 18488 switch (rs6000_cpu_attr) {
3cb999d8
DE
18489 case CPU_RIOS1: /* ? */
18490 case CPU_RS64A:
18491 case CPU_PPC601: /* ? */
ed947a96 18492 case CPU_PPC7450:
3cb999d8 18493 return 3;
b54cf83a 18494 case CPU_PPC440:
b6c9286a 18495 case CPU_PPC603:
bef84347 18496 case CPU_PPC750:
ed947a96 18497 case CPU_PPC7400:
be12c2b0 18498 case CPU_PPC8540:
d296e02e 18499 case CPU_CELL:
fa41c305
EW
18500 case CPU_PPCE300C2:
18501 case CPU_PPCE300C3:
f676971a 18502 return 2;
3cb999d8 18503 case CPU_RIOS2:
b6c9286a 18504 case CPU_PPC604:
19684119 18505 case CPU_PPC604E:
b6c9286a 18506 case CPU_PPC620:
3cb999d8 18507 case CPU_PPC630:
b6c9286a 18508 return 4;
cbe26ab8 18509 case CPU_POWER4:
ec507f2d 18510 case CPU_POWER5:
44cd321e 18511 case CPU_POWER6:
cbe26ab8 18512 return 5;
b6c9286a
MM
18513 default:
18514 return 1;
18515 }
18516}
18517
be12c2b0
VM
18518/* Return how many instructions to look ahead for better insn
18519 scheduling. */
18520
18521static int
863d938c 18522rs6000_use_sched_lookahead (void)
be12c2b0
VM
18523{
18524 if (rs6000_cpu_attr == CPU_PPC8540)
18525 return 4;
d296e02e
AP
18526 if (rs6000_cpu_attr == CPU_CELL)
18527 return (reload_completed ? 8 : 0);
be12c2b0
VM
18528 return 0;
18529}
18530
d296e02e
AP
18531/* We are choosing insn from the ready queue. Return nonzero if INSN can be chosen. */
18532static int
18533rs6000_use_sched_lookahead_guard (rtx insn)
18534{
18535 if (rs6000_cpu_attr != CPU_CELL)
18536 return 1;
18537
18538 if (insn == NULL_RTX || !INSN_P (insn))
18539 abort ();
982afe02 18540
d296e02e
AP
18541 if (!reload_completed
18542 || is_nonpipeline_insn (insn)
18543 || is_microcoded_insn (insn))
18544 return 0;
18545
18546 return 1;
18547}
18548
569fa502
DN
 18549/* Determine if PAT refers to memory. */
18550
18551static bool
18552is_mem_ref (rtx pat)
18553{
18554 const char * fmt;
18555 int i, j;
18556 bool ret = false;
18557
1de59bbd
DE
18558 /* stack_tie does not produce any real memory traffic. */
18559 if (GET_CODE (pat) == UNSPEC
18560 && XINT (pat, 1) == UNSPEC_TIE)
18561 return false;
18562
569fa502
DN
18563 if (GET_CODE (pat) == MEM)
18564 return true;
18565
18566 /* Recursively process the pattern. */
18567 fmt = GET_RTX_FORMAT (GET_CODE (pat));
18568
18569 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
18570 {
18571 if (fmt[i] == 'e')
18572 ret |= is_mem_ref (XEXP (pat, i));
18573 else if (fmt[i] == 'E')
18574 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
18575 ret |= is_mem_ref (XVECEXP (pat, i, j));
18576 }
18577
18578 return ret;
18579}
18580
18581/* Determine if PAT is a PATTERN of a load insn. */
f676971a 18582
569fa502
DN
18583static bool
18584is_load_insn1 (rtx pat)
18585{
18586 if (!pat || pat == NULL_RTX)
18587 return false;
18588
18589 if (GET_CODE (pat) == SET)
18590 return is_mem_ref (SET_SRC (pat));
18591
18592 if (GET_CODE (pat) == PARALLEL)
18593 {
18594 int i;
18595
18596 for (i = 0; i < XVECLEN (pat, 0); i++)
18597 if (is_load_insn1 (XVECEXP (pat, 0, i)))
18598 return true;
18599 }
18600
18601 return false;
18602}
18603
18604/* Determine if INSN loads from memory. */
18605
18606static bool
18607is_load_insn (rtx insn)
18608{
18609 if (!insn || !INSN_P (insn))
18610 return false;
18611
18612 if (GET_CODE (insn) == CALL_INSN)
18613 return false;
18614
18615 return is_load_insn1 (PATTERN (insn));
18616}
18617
18618/* Determine if PAT is a PATTERN of a store insn. */
18619
18620static bool
18621is_store_insn1 (rtx pat)
18622{
18623 if (!pat || pat == NULL_RTX)
18624 return false;
18625
18626 if (GET_CODE (pat) == SET)
18627 return is_mem_ref (SET_DEST (pat));
18628
18629 if (GET_CODE (pat) == PARALLEL)
18630 {
18631 int i;
18632
18633 for (i = 0; i < XVECLEN (pat, 0); i++)
18634 if (is_store_insn1 (XVECEXP (pat, 0, i)))
18635 return true;
18636 }
18637
18638 return false;
18639}
18640
18641/* Determine if INSN stores to memory. */
18642
18643static bool
18644is_store_insn (rtx insn)
18645{
18646 if (!insn || !INSN_P (insn))
18647 return false;
18648
18649 return is_store_insn1 (PATTERN (insn));
18650}
18651
e3a0e200
PB
18652/* Return the dest of a store insn. */
18653
18654static rtx
18655get_store_dest (rtx pat)
18656{
18657 gcc_assert (is_store_insn1 (pat));
18658
18659 if (GET_CODE (pat) == SET)
18660 return SET_DEST (pat);
18661 else if (GET_CODE (pat) == PARALLEL)
18662 {
18663 int i;
18664
18665 for (i = 0; i < XVECLEN (pat, 0); i++)
18666 {
18667 rtx inner_pat = XVECEXP (pat, 0, i);
18668 if (GET_CODE (inner_pat) == SET
18669 && is_mem_ref (SET_DEST (inner_pat)))
18670 return inner_pat;
18671 }
18672 }
18673 /* We shouldn't get here, because we should have either a simple
18674 store insn or a store with update which are covered above. */
 18675 gcc_unreachable ();
18676}
18677
569fa502
DN
18678/* Returns whether the dependence between INSN and NEXT is considered
18679 costly by the given target. */
18680
18681static bool
b198261f 18682rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 18683{
b198261f
MK
18684 rtx insn;
18685 rtx next;
18686
aabcd309 18687 /* If the flag is not enabled - no dependence is considered costly;
f676971a 18688 allow all dependent insns in the same group.
569fa502
DN
18689 This is the most aggressive option. */
18690 if (rs6000_sched_costly_dep == no_dep_costly)
18691 return false;
18692
f676971a 18693 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
18694 do not allow dependent instructions in the same group.
18695 This is the most conservative option. */
18696 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 18697 return true;
569fa502 18698
b198261f
MK
18699 insn = DEP_PRO (dep);
18700 next = DEP_CON (dep);
18701
f676971a
EC
18702 if (rs6000_sched_costly_dep == store_to_load_dep_costly
18703 && is_load_insn (next)
569fa502
DN
18704 && is_store_insn (insn))
18705 /* Prevent load after store in the same group. */
18706 return true;
18707
18708 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 18709 && is_load_insn (next)
569fa502 18710 && is_store_insn (insn)
e2f6ff94 18711 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
18712 /* Prevent load after store in the same group if it is a true
18713 dependence. */
569fa502 18714 return true;
f676971a
EC
18715
18716 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
18717 and will not be scheduled in the same group. */
18718 if (rs6000_sched_costly_dep <= max_dep_latency
18719 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
18720 return true;
18721
18722 return false;
18723}
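/* Example (illustrative, ours): when rs6000_sched_costly_dep holds a numeric
   threshold N (the final test above), a dependence whose adjusted latency
   (cost - distance) is 5 is considered costly for any N <= 5, so the two
   insns will not be placed in the same dispatch group.  */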
18724
f676971a 18725/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
18726 skipping any "non-active" insns - insns that will not actually occupy
18727 an issue slot. Return NULL_RTX if such an insn is not found. */
18728
18729static rtx
18730get_next_active_insn (rtx insn, rtx tail)
18731{
f489aff8 18732 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
18733 return NULL_RTX;
18734
f489aff8 18735 while (1)
cbe26ab8 18736 {
f489aff8
AM
18737 insn = NEXT_INSN (insn);
18738 if (insn == NULL_RTX || insn == tail)
18739 return NULL_RTX;
cbe26ab8 18740
f489aff8
AM
18741 if (CALL_P (insn)
18742 || JUMP_P (insn)
18743 || (NONJUMP_INSN_P (insn)
18744 && GET_CODE (PATTERN (insn)) != USE
18745 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 18746 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
18747 break;
18748 }
18749 return insn;
cbe26ab8
DN
18750}
18751
44cd321e
PS
18752/* We are about to begin issuing insns for this clock cycle. */
18753
18754static int
18755rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
18756 rtx *ready ATTRIBUTE_UNUSED,
18757 int *pn_ready ATTRIBUTE_UNUSED,
18758 int clock_var ATTRIBUTE_UNUSED)
18759{
d296e02e
AP
18760 int n_ready = *pn_ready;
18761
44cd321e
PS
18762 if (sched_verbose)
18763 fprintf (dump, "// rs6000_sched_reorder :\n");
18764
d296e02e
AP
18765 /* Reorder the ready list, if the second to last ready insn
 18766 is a nonpipeline insn. */
18767 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
18768 {
18769 if (is_nonpipeline_insn (ready[n_ready - 1])
18770 && (recog_memoized (ready[n_ready - 2]) > 0))
18771 /* Simply swap first two insns. */
18772 {
18773 rtx tmp = ready[n_ready - 1];
18774 ready[n_ready - 1] = ready[n_ready - 2];
18775 ready[n_ready - 2] = tmp;
18776 }
18777 }
18778
44cd321e
PS
18779 if (rs6000_cpu == PROCESSOR_POWER6)
18780 load_store_pendulum = 0;
18781
18782 return rs6000_issue_rate ();
18783}
18784
18785/* Like rs6000_sched_reorder, but called after issuing each insn. */
18786
18787static int
18788rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
18789 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
18790{
18791 if (sched_verbose)
18792 fprintf (dump, "// rs6000_sched_reorder2 :\n");
18793
18794 /* For Power6, we need to handle some special cases to try and keep the
18795 store queue from overflowing and triggering expensive flushes.
18796
18797 This code monitors how load and store instructions are being issued
18798 and skews the ready list one way or the other to increase the likelihood
18799 that a desired instruction is issued at the proper time.
18800
18801 A couple of things are done. First, we maintain a "load_store_pendulum"
18802 to track the current state of load/store issue.
18803
18804 - If the pendulum is at zero, then no loads or stores have been
18805 issued in the current cycle so we do nothing.
18806
18807 - If the pendulum is 1, then a single load has been issued in this
18808 cycle and we attempt to locate another load in the ready list to
18809 issue with it.
18810
2f8e468b 18811 - If the pendulum is -2, then two stores have already been
44cd321e
PS
18812 issued in this cycle, so we increase the priority of the first load
 18813 in the ready list to increase its likelihood of being chosen first
18814 in the next cycle.
18815
18816 - If the pendulum is -1, then a single store has been issued in this
18817 cycle and we attempt to locate another store in the ready list to
18818 issue with it, preferring a store to an adjacent memory location to
18819 facilitate store pairing in the store queue.
18820
18821 - If the pendulum is 2, then two loads have already been
18822 issued in this cycle, so we increase the priority of the first store
 18823 in the ready list to increase its likelihood of being chosen first
18824 in the next cycle.
18825
18826 - If the pendulum < -2 or > 2, then do nothing.
18827
18828 Note: This code covers the most common scenarios. There exist non
18829 load/store instructions which make use of the LSU and which
18830 would need to be accounted for to strictly model the behavior
18831 of the machine. Those instructions are currently unaccounted
18832 for to help minimize compile time overhead of this code.
18833 */
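  /* Worked example (illustrative, ours): starting from a balanced pendulum
     of 0, issuing a store moves it to -1 and the ready list is searched for
     a second store, preferring one to an adjacent location; a further store
     moves it to -2, at which point the first load on the ready list gets a
     priority boost so that a load is favored on the next cycle.  Loads
     mirror this through the +1/+2 states.  */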
18834 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
18835 {
18836 int pos;
18837 int i;
18838 rtx tmp;
18839
18840 if (is_store_insn (last_scheduled_insn))
18841 /* Issuing a store, swing the load_store_pendulum to the left */
18842 load_store_pendulum--;
18843 else if (is_load_insn (last_scheduled_insn))
18844 /* Issuing a load, swing the load_store_pendulum to the right */
18845 load_store_pendulum++;
18846 else
18847 return cached_can_issue_more;
18848
18849 /* If the pendulum is balanced, or there is only one instruction on
18850 the ready list, then all is well, so return. */
18851 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
18852 return cached_can_issue_more;
18853
18854 if (load_store_pendulum == 1)
18855 {
18856 /* A load has been issued in this cycle. Scan the ready list
18857 for another load to issue with it */
18858 pos = *pn_ready-1;
18859
18860 while (pos >= 0)
18861 {
18862 if (is_load_insn (ready[pos]))
18863 {
18864 /* Found a load. Move it to the head of the ready list,
 18865 and adjust its priority so that it is more likely to
18866 stay there */
18867 tmp = ready[pos];
18868 for (i=pos; i<*pn_ready-1; i++)
18869 ready[i] = ready[i + 1];
18870 ready[*pn_ready-1] = tmp;
 18871 if (INSN_PRIORITY_KNOWN (tmp))
18872 INSN_PRIORITY (tmp)++;
18873 break;
18874 }
18875 pos--;
18876 }
18877 }
18878 else if (load_store_pendulum == -2)
18879 {
18880 /* Two stores have been issued in this cycle. Increase the
18881 priority of the first load in the ready list to favor it for
18882 issuing in the next cycle. */
18883 pos = *pn_ready-1;
18884
18885 while (pos >= 0)
18886 {
18887 if (is_load_insn (ready[pos])
18888 && INSN_PRIORITY_KNOWN (ready[pos]))
18889 {
18890 INSN_PRIORITY (ready[pos])++;
18891
18892 /* Adjust the pendulum to account for the fact that a load
18893 was found and increased in priority. This is to prevent
18894 increasing the priority of multiple loads */
18895 load_store_pendulum--;
18896
18897 break;
18898 }
18899 pos--;
18900 }
18901 }
18902 else if (load_store_pendulum == -1)
18903 {
18904 /* A store has been issued in this cycle. Scan the ready list for
18905 another store to issue with it, preferring a store to an adjacent
18906 memory location */
18907 int first_store_pos = -1;
18908
18909 pos = *pn_ready-1;
18910
18911 while (pos >= 0)
18912 {
18913 if (is_store_insn (ready[pos]))
18914 {
18915 /* Maintain the index of the first store found on the
18916 list */
18917 if (first_store_pos == -1)
18918 first_store_pos = pos;
18919
18920 if (is_store_insn (last_scheduled_insn)
18921 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
18922 {
18923 /* Found an adjacent store. Move it to the head of the
 18924 ready list, and adjust its priority so that it is
18925 more likely to stay there */
18926 tmp = ready[pos];
18927 for (i=pos; i<*pn_ready-1; i++)
18928 ready[i] = ready[i + 1];
18929 ready[*pn_ready-1] = tmp;
 18930 if (INSN_PRIORITY_KNOWN (tmp))
18931 INSN_PRIORITY (tmp)++;
18932 first_store_pos = -1;
18933
18934 break;
 18935 }
18936 }
18937 pos--;
18938 }
18939
18940 if (first_store_pos >= 0)
18941 {
18942 /* An adjacent store wasn't found, but a non-adjacent store was,
18943 so move the non-adjacent store to the front of the ready
18944 list, and adjust its priority so that it is more likely to
18945 stay there. */
18946 tmp = ready[first_store_pos];
18947 for (i=first_store_pos; i<*pn_ready-1; i++)
18948 ready[i] = ready[i + 1];
18949 ready[*pn_ready-1] = tmp;
 18950 if (INSN_PRIORITY_KNOWN (tmp))
18951 INSN_PRIORITY (tmp)++;
18952 }
18953 }
18954 else if (load_store_pendulum == 2)
18955 {
18956 /* Two loads have been issued in this cycle. Increase the priority
18957 of the first store in the ready list to favor it for issuing in
18958 the next cycle. */
18959 pos = *pn_ready-1;
18960
18961 while (pos >= 0)
18962 {
18963 if (is_store_insn (ready[pos])
18964 && INSN_PRIORITY_KNOWN (ready[pos]))
18965 {
18966 INSN_PRIORITY (ready[pos])++;
18967
18968 /* Adjust the pendulum to account for the fact that a store
18969 was found and increased in priority. This is to prevent
18970 increasing the priority of multiple stores */
18971 load_store_pendulum++;
18972
18973 break;
18974 }
18975 pos--;
18976 }
18977 }
18978 }
18979
18980 return cached_can_issue_more;
18981}
18982
839a4992 18983/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
18984 of group WHICH_GROUP.
18985
18986 If WHICH_GROUP == current_group, this function will return true if INSN
 18987 causes the termination of the current group (i.e., the dispatch group to
18988 which INSN belongs). This means that INSN will be the last insn in the
18989 group it belongs to.
18990
18991 If WHICH_GROUP == previous_group, this function will return true if INSN
 18992 causes the termination of the previous group (i.e., the dispatch group that
18993 precedes the group to which INSN belongs). This means that INSN will be
 18994 the first insn in the group it belongs to. */
18995
18996static bool
18997insn_terminates_group_p (rtx insn, enum group_termination which_group)
18998{
44cd321e 18999 bool first, last;
cbe26ab8
DN
19000
19001 if (! insn)
19002 return false;
569fa502 19003
44cd321e
PS
19004 first = insn_must_be_first_in_group (insn);
19005 last = insn_must_be_last_in_group (insn);
cbe26ab8 19006
44cd321e 19007 if (first && last)
cbe26ab8
DN
19008 return true;
19009
19010 if (which_group == current_group)
44cd321e 19011 return last;
cbe26ab8 19012 else if (which_group == previous_group)
44cd321e
PS
19013 return first;
19014
19015 return false;
19016}
19017
19018
19019static bool
19020insn_must_be_first_in_group (rtx insn)
19021{
19022 enum attr_type type;
19023
19024 if (!insn
19025 || insn == NULL_RTX
19026 || GET_CODE (insn) == NOTE
19027 || GET_CODE (PATTERN (insn)) == USE
19028 || GET_CODE (PATTERN (insn)) == CLOBBER)
19029 return false;
19030
19031 switch (rs6000_cpu)
cbe26ab8 19032 {
44cd321e
PS
19033 case PROCESSOR_POWER5:
19034 if (is_cracked_insn (insn))
19035 return true;
19036 case PROCESSOR_POWER4:
19037 if (is_microcoded_insn (insn))
19038 return true;
19039
19040 if (!rs6000_sched_groups)
19041 return false;
19042
19043 type = get_attr_type (insn);
19044
19045 switch (type)
19046 {
19047 case TYPE_MFCR:
19048 case TYPE_MFCRF:
19049 case TYPE_MTCR:
19050 case TYPE_DELAYED_CR:
19051 case TYPE_CR_LOGICAL:
19052 case TYPE_MTJMPR:
19053 case TYPE_MFJMPR:
19054 case TYPE_IDIV:
19055 case TYPE_LDIV:
19056 case TYPE_LOAD_L:
19057 case TYPE_STORE_C:
19058 case TYPE_ISYNC:
19059 case TYPE_SYNC:
19060 return true;
19061 default:
19062 break;
19063 }
19064 break;
19065 case PROCESSOR_POWER6:
19066 type = get_attr_type (insn);
19067
19068 switch (type)
19069 {
19070 case TYPE_INSERT_DWORD:
19071 case TYPE_EXTS:
19072 case TYPE_CNTLZ:
19073 case TYPE_SHIFT:
19074 case TYPE_VAR_SHIFT_ROTATE:
19075 case TYPE_TRAP:
19076 case TYPE_IMUL:
19077 case TYPE_IMUL2:
19078 case TYPE_IMUL3:
19079 case TYPE_LMUL:
19080 case TYPE_IDIV:
19081 case TYPE_INSERT_WORD:
19082 case TYPE_DELAYED_COMPARE:
19083 case TYPE_IMUL_COMPARE:
19084 case TYPE_LMUL_COMPARE:
19085 case TYPE_FPCOMPARE:
19086 case TYPE_MFCR:
19087 case TYPE_MTCR:
19088 case TYPE_MFJMPR:
19089 case TYPE_MTJMPR:
19090 case TYPE_ISYNC:
19091 case TYPE_SYNC:
19092 case TYPE_LOAD_L:
19093 case TYPE_STORE_C:
19094 case TYPE_LOAD_U:
19095 case TYPE_LOAD_UX:
19096 case TYPE_LOAD_EXT_UX:
19097 case TYPE_STORE_U:
19098 case TYPE_STORE_UX:
19099 case TYPE_FPLOAD_U:
19100 case TYPE_FPLOAD_UX:
19101 case TYPE_FPSTORE_U:
19102 case TYPE_FPSTORE_UX:
19103 return true;
19104 default:
19105 break;
19106 }
19107 break;
19108 default:
19109 break;
19110 }
19111
19112 return false;
19113}
19114
19115static bool
19116insn_must_be_last_in_group (rtx insn)
19117{
19118 enum attr_type type;
19119
19120 if (!insn
19121 || insn == NULL_RTX
19122 || GET_CODE (insn) == NOTE
19123 || GET_CODE (PATTERN (insn)) == USE
19124 || GET_CODE (PATTERN (insn)) == CLOBBER)
19125 return false;
19126
19127 switch (rs6000_cpu) {
19128 case PROCESSOR_POWER4:
19129 case PROCESSOR_POWER5:
19130 if (is_microcoded_insn (insn))
19131 return true;
19132
19133 if (is_branch_slot_insn (insn))
19134 return true;
19135
19136 break;
19137 case PROCESSOR_POWER6:
19138 type = get_attr_type (insn);
19139
19140 switch (type)
19141 {
19142 case TYPE_EXTS:
19143 case TYPE_CNTLZ:
19144 case TYPE_SHIFT:
19145 case TYPE_VAR_SHIFT_ROTATE:
19146 case TYPE_TRAP:
19147 case TYPE_IMUL:
19148 case TYPE_IMUL2:
19149 case TYPE_IMUL3:
19150 case TYPE_LMUL:
19151 case TYPE_IDIV:
19152 case TYPE_DELAYED_COMPARE:
19153 case TYPE_IMUL_COMPARE:
19154 case TYPE_LMUL_COMPARE:
19155 case TYPE_FPCOMPARE:
19156 case TYPE_MFCR:
19157 case TYPE_MTCR:
19158 case TYPE_MFJMPR:
19159 case TYPE_MTJMPR:
19160 case TYPE_ISYNC:
19161 case TYPE_SYNC:
19162 case TYPE_LOAD_L:
19163 case TYPE_STORE_C:
19164 return true;
19165 default:
19166 break;
cbe26ab8 19167 }
44cd321e
PS
19168 break;
19169 default:
19170 break;
19171 }
cbe26ab8
DN
19172
19173 return false;
19174}
19175
839a4992 19176/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
19177 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
19178
19179static bool
19180is_costly_group (rtx *group_insns, rtx next_insn)
19181{
19182 int i;
cbe26ab8
DN
19183 int issue_rate = rs6000_issue_rate ();
19184
19185 for (i = 0; i < issue_rate; i++)
19186 {
e2f6ff94
MK
19187 sd_iterator_def sd_it;
19188 dep_t dep;
cbe26ab8 19189 rtx insn = group_insns[i];
b198261f 19190
cbe26ab8 19191 if (!insn)
c4ad648e 19192 continue;
b198261f 19193
e2f6ff94 19194 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 19195 {
b198261f
MK
19196 rtx next = DEP_CON (dep);
19197
19198 if (next == next_insn
19199 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
19200 return true;
c4ad648e 19201 }
cbe26ab8
DN
19202 }
19203
19204 return false;
19205}
19206
f676971a 19207/* Helper function for redefine_groups.
cbe26ab8
DN
19208 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
19209 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
19210 to keep it "far" (in a separate group) from GROUP_INSNS, following
19211 one of the following schemes, depending on the value of the flag
19212 -minsert_sched_nops = X:
19213 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 19214 in order to force NEXT_INSN into a separate group.
f676971a
EC
19215 (2) X < sched_finish_regroup_exact: insert exactly X nops.
19216 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
19217 insertion (has a group just ended, how many vacant issue slots remain in the
19218 last group, and how many dispatch groups were encountered so far). */
19219
f676971a 19220static int
c4ad648e
AM
19221force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
19222 rtx next_insn, bool *group_end, int can_issue_more,
19223 int *group_count)
cbe26ab8
DN
19224{
19225 rtx nop;
19226 bool force;
19227 int issue_rate = rs6000_issue_rate ();
19228 bool end = *group_end;
19229 int i;
19230
19231 if (next_insn == NULL_RTX)
19232 return can_issue_more;
19233
19234 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
19235 return can_issue_more;
19236
19237 force = is_costly_group (group_insns, next_insn);
19238 if (!force)
19239 return can_issue_more;
19240
19241 if (sched_verbose > 6)
19242 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 19243 *group_count ,can_issue_more);
cbe26ab8
DN
19244
19245 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
19246 {
19247 if (*group_end)
c4ad648e 19248 can_issue_more = 0;
cbe26ab8
DN
19249
19250 /* Since only a branch can be issued in the last issue_slot, it is
19251 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
19252 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
19253 in this case the last nop will start a new group and the branch
19254 will be forced to the new group. */
cbe26ab8 19255 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 19256 can_issue_more--;
cbe26ab8
DN
19257
19258 while (can_issue_more > 0)
c4ad648e 19259 {
9390387d 19260 nop = gen_nop ();
c4ad648e
AM
19261 emit_insn_before (nop, next_insn);
19262 can_issue_more--;
19263 }
cbe26ab8
DN
19264
19265 *group_end = true;
19266 return 0;
f676971a 19267 }
cbe26ab8
DN
19268
19269 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
19270 {
19271 int n_nops = rs6000_sched_insert_nops;
19272
f676971a 19273 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 19274 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 19275 if (can_issue_more == 0)
c4ad648e 19276 can_issue_more = issue_rate;
cbe26ab8
DN
19277 can_issue_more--;
19278 if (can_issue_more == 0)
c4ad648e
AM
19279 {
19280 can_issue_more = issue_rate - 1;
19281 (*group_count)++;
19282 end = true;
19283 for (i = 0; i < issue_rate; i++)
19284 {
19285 group_insns[i] = 0;
19286 }
19287 }
cbe26ab8
DN
19288
19289 while (n_nops > 0)
c4ad648e
AM
19290 {
19291 nop = gen_nop ();
19292 emit_insn_before (nop, next_insn);
19293 if (can_issue_more == issue_rate - 1) /* new group begins */
19294 end = false;
19295 can_issue_more--;
19296 if (can_issue_more == 0)
19297 {
19298 can_issue_more = issue_rate - 1;
19299 (*group_count)++;
19300 end = true;
19301 for (i = 0; i < issue_rate; i++)
19302 {
19303 group_insns[i] = 0;
19304 }
19305 }
19306 n_nops--;
19307 }
cbe26ab8
DN
19308
19309 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 19310 can_issue_more++;
cbe26ab8 19311
c4ad648e
AM
19312 /* Is next_insn going to start a new group? */
19313 *group_end
19314 = (end
cbe26ab8
DN
19315 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19316 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19317 || (can_issue_more < issue_rate &&
c4ad648e 19318 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19319 if (*group_end && end)
c4ad648e 19320 (*group_count)--;
cbe26ab8
DN
19321
19322 if (sched_verbose > 6)
c4ad648e
AM
19323 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
19324 *group_count, can_issue_more);
f676971a
EC
19325 return can_issue_more;
19326 }
cbe26ab8
DN
19327
19328 return can_issue_more;
19329}
19330
19331/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 19332 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
19333 form in practice. It tries to achieve this synchronization by forcing the
19334 estimated processor grouping on the compiler (as opposed to the function
19335 'pad_goups' which tries to force the scheduler's grouping on the processor).
19336
19337 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
19338 examines the (estimated) dispatch groups that will be formed by the processor
19339 dispatcher. It marks these group boundaries to reflect the estimated
19340 processor grouping, overriding the grouping that the scheduler had marked.
19341 Depending on the value of the flag '-minsert-sched-nops' this function can
19342 force certain insns into separate groups or force a certain distance between
19343 them by inserting nops, for example, if there exists a "costly dependence"
19344 between the insns.
19345
19346 The function estimates the group boundaries that the processor will form as
0fa2e4df 19347 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
19348 each insn. A subsequent insn will start a new group if one of the following
19349 4 cases applies:
19350 - no more vacant issue slots remain in the current dispatch group.
19351 - only the last issue slot, which is the branch slot, is vacant, but the next
19352 insn is not a branch.
 19353 - only the last 2 or fewer issue slots, including the branch slot, are vacant,
19354 which means that a cracked insn (which occupies two issue slots) can't be
19355 issued in this group.
f676971a 19356 - fewer than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
19357 start a new group. */
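/* Example (illustrative, ours): with a 5-wide dispatch group, if only two
   slots remain vacant and one of them is the branch-only slot, a cracked
   insn (which needs two ordinary slots) cannot fit, so the third rule above
   fires and the insn starts a new group, which the code below marks by
   putting the insn in TImode.  */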
19358
19359static int
19360redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19361{
19362 rtx insn, next_insn;
19363 int issue_rate;
19364 int can_issue_more;
19365 int slot, i;
19366 bool group_end;
19367 int group_count = 0;
19368 rtx *group_insns;
19369
19370 /* Initialize. */
19371 issue_rate = rs6000_issue_rate ();
19372 group_insns = alloca (issue_rate * sizeof (rtx));
f676971a 19373 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
19374 {
19375 group_insns[i] = 0;
19376 }
19377 can_issue_more = issue_rate;
19378 slot = 0;
19379 insn = get_next_active_insn (prev_head_insn, tail);
19380 group_end = false;
19381
19382 while (insn != NULL_RTX)
19383 {
19384 slot = (issue_rate - can_issue_more);
19385 group_insns[slot] = insn;
19386 can_issue_more =
c4ad648e 19387 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 19388 if (insn_terminates_group_p (insn, current_group))
c4ad648e 19389 can_issue_more = 0;
cbe26ab8
DN
19390
19391 next_insn = get_next_active_insn (insn, tail);
19392 if (next_insn == NULL_RTX)
c4ad648e 19393 return group_count + 1;
cbe26ab8 19394
c4ad648e
AM
19395 /* Is next_insn going to start a new group? */
19396 group_end
19397 = (can_issue_more == 0
19398 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19399 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19400 || (can_issue_more < issue_rate &&
19401 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19402
f676971a 19403 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
19404 next_insn, &group_end, can_issue_more,
19405 &group_count);
cbe26ab8
DN
19406
19407 if (group_end)
c4ad648e
AM
19408 {
19409 group_count++;
19410 can_issue_more = 0;
19411 for (i = 0; i < issue_rate; i++)
19412 {
19413 group_insns[i] = 0;
19414 }
19415 }
cbe26ab8
DN
19416
19417 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 19418 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 19419 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 19420 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
19421
19422 insn = next_insn;
19423 if (can_issue_more == 0)
c4ad648e
AM
19424 can_issue_more = issue_rate;
19425 } /* while */
cbe26ab8
DN
19426
19427 return group_count;
19428}
19429
19430/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
19431 dispatch group boundaries that the scheduler had marked. Pad with nops
19432 any dispatch groups which have vacant issue slots, in order to force the
19433 scheduler's grouping on the processor dispatcher. The function
19434 returns the number of dispatch groups found. */
19435
19436static int
19437pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19438{
19439 rtx insn, next_insn;
19440 rtx nop;
19441 int issue_rate;
19442 int can_issue_more;
19443 int group_end;
19444 int group_count = 0;
19445
19446 /* Initialize issue_rate. */
19447 issue_rate = rs6000_issue_rate ();
19448 can_issue_more = issue_rate;
19449
19450 insn = get_next_active_insn (prev_head_insn, tail);
19451 next_insn = get_next_active_insn (insn, tail);
19452
19453 while (insn != NULL_RTX)
19454 {
19455 can_issue_more =
19456 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
19457
19458 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
19459
19460 if (next_insn == NULL_RTX)
c4ad648e 19461 break;
cbe26ab8
DN
19462
19463 if (group_end)
c4ad648e
AM
19464 {
19465 /* If the scheduler had marked group termination at this location
 19466 (between insn and next_insn), and neither insn nor next_insn will
19467 force group termination, pad the group with nops to force group
19468 termination. */
19469 if (can_issue_more
19470 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
19471 && !insn_terminates_group_p (insn, current_group)
19472 && !insn_terminates_group_p (next_insn, previous_group))
19473 {
9390387d 19474 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
19475 can_issue_more--;
19476
19477 while (can_issue_more)
19478 {
19479 nop = gen_nop ();
19480 emit_insn_before (nop, next_insn);
19481 can_issue_more--;
19482 }
19483 }
19484
19485 can_issue_more = issue_rate;
19486 group_count++;
19487 }
cbe26ab8
DN
19488
19489 insn = next_insn;
19490 next_insn = get_next_active_insn (insn, tail);
19491 }
19492
19493 return group_count;
19494}
19495
44cd321e
PS
19496/* We're beginning a new block. Initialize data structures as necessary. */
19497
19498static void
19499rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
19500 int sched_verbose ATTRIBUTE_UNUSED,
19501 int max_ready ATTRIBUTE_UNUSED)
982afe02 19502{
44cd321e
PS
19503 last_scheduled_insn = NULL_RTX;
19504 load_store_pendulum = 0;
19505}
19506
cbe26ab8
DN
19507/* The following function is called at the end of scheduling BB.
 19508 After reload, it inserts nops to enforce insn group bundling. */
19509
19510static void
38f391a5 19511rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
19512{
19513 int n_groups;
19514
19515 if (sched_verbose)
19516 fprintf (dump, "=== Finishing schedule.\n");
19517
ec507f2d 19518 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
19519 {
19520 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 19521 return;
cbe26ab8
DN
19522
19523 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
19524 n_groups = pad_groups (dump, sched_verbose,
19525 current_sched_info->prev_head,
19526 current_sched_info->next_tail);
cbe26ab8 19527 else
c4ad648e
AM
19528 n_groups = redefine_groups (dump, sched_verbose,
19529 current_sched_info->prev_head,
19530 current_sched_info->next_tail);
cbe26ab8
DN
19531
19532 if (sched_verbose >= 6)
19533 {
19534 fprintf (dump, "ngroups = %d\n", n_groups);
19535 print_rtl (dump, current_sched_info->prev_head);
19536 fprintf (dump, "Done finish_sched\n");
19537 }
19538 }
19539}
b6c9286a 19540\f
b6c9286a
MM
19541/* Length in units of the trampoline for entering a nested function. */
19542
19543int
863d938c 19544rs6000_trampoline_size (void)
b6c9286a
MM
19545{
19546 int ret = 0;
19547
19548 switch (DEFAULT_ABI)
19549 {
19550 default:
37409796 19551 gcc_unreachable ();
b6c9286a
MM
19552
19553 case ABI_AIX:
8f802bfb 19554 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
19555 break;
19556
4dabc42d 19557 case ABI_DARWIN:
b6c9286a 19558 case ABI_V4:
03a7e1a5 19559 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 19560 break;
b6c9286a
MM
19561 }
19562
19563 return ret;
19564}
19565
19566/* Emit RTL insns to initialize the variable parts of a trampoline.
19567 FNADDR is an RTX for the address of the function's pure code.
19568 CXT is an RTX for the static chain value for the function. */
19569
19570void
a2369ed3 19571rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 19572{
8bd04c56 19573 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 19574 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
19575
19576 switch (DEFAULT_ABI)
19577 {
19578 default:
37409796 19579 gcc_unreachable ();
b6c9286a 19580
8bd04c56 19581/* Macros to shorten the code expansions below. */
9613eaff 19582#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 19583#define MEM_PLUS(addr,offset) \
9613eaff 19584 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 19585
b6c9286a
MM
19586 /* Under AIX, just build the 3 word function descriptor */
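    /* Descriptor layout (descriptive note, ours): word 0 receives the entry
       address loaded from FNADDR's own descriptor, word 1 the TOC pointer,
       and word 2 the static chain CXT, matching the three stores below.  */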
19587 case ABI_AIX:
8bd04c56 19588 {
9613eaff
SH
19589 rtx fn_reg = gen_reg_rtx (Pmode);
19590 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 19591 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 19592 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
19593 emit_move_insn (MEM_DEREF (addr), fn_reg);
19594 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
19595 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
19596 }
b6c9286a
MM
19597 break;
19598
4dabc42d
TC
19599 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
19600 case ABI_DARWIN:
b6c9286a 19601 case ABI_V4:
9613eaff 19602 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 19603 FALSE, VOIDmode, 4,
9613eaff 19604 addr, Pmode,
eaf1bcf1 19605 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
19606 fnaddr, Pmode,
19607 ctx_reg, Pmode);
b6c9286a 19608 break;
b6c9286a
MM
19609 }
19610
19611 return;
19612}
7509c759
MM
19613
19614\f
91d231cb 19615/* Table of valid machine attributes. */
a4f6c312 19616
91d231cb 19617const struct attribute_spec rs6000_attribute_table[] =
7509c759 19618{
91d231cb 19619 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 19620 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
19621 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
19622 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
19623 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
19624 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
19625#ifdef SUBTARGET_ATTRIBUTE_TABLE
19626 SUBTARGET_ATTRIBUTE_TABLE,
19627#endif
a5c76ee6 19628 { NULL, 0, 0, false, false, false, NULL }
91d231cb 19629};
7509c759 19630
8bb418a3
ZL
19631/* Handle the "altivec" attribute. The attribute may have
19632 arguments as follows:
f676971a 19633
8bb418a3
ZL
19634 __attribute__((altivec(vector__)))
19635 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
19636 __attribute__((altivec(bool__))) (always followed by 'unsigned')
19637
19638 and may appear more than once (e.g., 'vector bool char') in a
19639 given declaration. */
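/* Example (illustrative, ours): with AltiVec enabled the 'vector' keyword is
   presented to the middle end as __attribute__((altivec(vector__))), so a
   declaration such as
       vector unsigned int v;
   reaches this handler with an 'unsigned int' base type and is rewritten to
   unsigned_V4SI_type_node by the 'v' case below, while 'vector bool short'
   becomes bool_V8HI_type_node via the 'b' case.  */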
19640
19641static tree
f90ac3f0
UP
19642rs6000_handle_altivec_attribute (tree *node,
19643 tree name ATTRIBUTE_UNUSED,
19644 tree args,
8bb418a3
ZL
19645 int flags ATTRIBUTE_UNUSED,
19646 bool *no_add_attrs)
19647{
19648 tree type = *node, result = NULL_TREE;
19649 enum machine_mode mode;
19650 int unsigned_p;
19651 char altivec_type
19652 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
19653 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
19654 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 19655 : '?');
8bb418a3
ZL
19656
19657 while (POINTER_TYPE_P (type)
19658 || TREE_CODE (type) == FUNCTION_TYPE
19659 || TREE_CODE (type) == METHOD_TYPE
19660 || TREE_CODE (type) == ARRAY_TYPE)
19661 type = TREE_TYPE (type);
19662
19663 mode = TYPE_MODE (type);
19664
f90ac3f0
UP
19665 /* Check for invalid AltiVec type qualifiers. */
19666 if (type == long_unsigned_type_node || type == long_integer_type_node)
19667 {
19668 if (TARGET_64BIT)
19669 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
19670 else if (rs6000_warn_altivec_long)
d4ee4d25 19671 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
19672 }
19673 else if (type == long_long_unsigned_type_node
19674 || type == long_long_integer_type_node)
19675 error ("use of %<long long%> in AltiVec types is invalid");
19676 else if (type == double_type_node)
19677 error ("use of %<double%> in AltiVec types is invalid");
19678 else if (type == long_double_type_node)
19679 error ("use of %<long double%> in AltiVec types is invalid");
19680 else if (type == boolean_type_node)
19681 error ("use of boolean types in AltiVec types is invalid");
19682 else if (TREE_CODE (type) == COMPLEX_TYPE)
19683 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
19684 else if (DECIMAL_FLOAT_MODE_P (mode))
19685 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
19686
19687 switch (altivec_type)
19688 {
19689 case 'v':
8df83eae 19690 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
19691 switch (mode)
19692 {
c4ad648e
AM
19693 case SImode:
19694 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
19695 break;
19696 case HImode:
19697 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
19698 break;
19699 case QImode:
19700 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
19701 break;
19702 case SFmode: result = V4SF_type_node; break;
19703 /* If the user says 'vector int bool', we may be handed the 'bool'
19704 attribute _before_ the 'vector' attribute, and so select the
19705 proper type in the 'b' case below. */
19706 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
19707 result = type;
19708 default: break;
8bb418a3
ZL
19709 }
19710 break;
19711 case 'b':
19712 switch (mode)
19713 {
c4ad648e
AM
19714 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
19715 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
19716 case QImode: case V16QImode: result = bool_V16QI_type_node;
19717 default: break;
8bb418a3
ZL
19718 }
19719 break;
19720 case 'p':
19721 switch (mode)
19722 {
c4ad648e
AM
19723 case V8HImode: result = pixel_V8HI_type_node;
19724 default: break;
8bb418a3
ZL
19725 }
19726 default: break;
19727 }
19728
7958a2a6
FJ
19729 if (result && result != type && TYPE_READONLY (type))
19730 result = build_qualified_type (result, TYPE_QUAL_CONST);
19731
8bb418a3
ZL
19732 *no_add_attrs = true; /* No need to hang on to the attribute. */
19733
f90ac3f0 19734 if (result)
8bb418a3
ZL
19735 *node = reconstruct_complex_type (*node, result);
19736
19737 return NULL_TREE;
19738}
19739
f18eca82
ZL
19740/* AltiVec defines four built-in scalar types that serve as vector
19741 elements; we must teach the compiler how to mangle them. */
19742
19743static const char *
3101faab 19744rs6000_mangle_type (const_tree type)
f18eca82 19745{
608063c3
JB
19746 type = TYPE_MAIN_VARIANT (type);
19747
19748 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
19749 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
19750 return NULL;
19751
f18eca82
ZL
19752 if (type == bool_char_type_node) return "U6__boolc";
19753 if (type == bool_short_type_node) return "U6__bools";
19754 if (type == pixel_type_node) return "u7__pixel";
19755 if (type == bool_int_type_node) return "U6__booli";
19756
337bde91
DE
19757 /* Mangle IBM extended float long double as `g' (__float128) on
19758 powerpc*-linux where long-double-64 previously was the default. */
19759 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
19760 && TARGET_ELF
19761 && TARGET_LONG_DOUBLE_128
19762 && !TARGET_IEEEQUAD)
19763 return "g";
19764
f18eca82
ZL
19765 /* For all other types, use normal C++ mangling. */
19766 return NULL;
19767}
19768
a5c76ee6
ZW
19769/* Handle a "longcall" or "shortcall" attribute; arguments as in
19770 struct attribute_spec.handler. */
a4f6c312 19771
91d231cb 19772static tree
f676971a
EC
19773rs6000_handle_longcall_attribute (tree *node, tree name,
19774 tree args ATTRIBUTE_UNUSED,
19775 int flags ATTRIBUTE_UNUSED,
a2369ed3 19776 bool *no_add_attrs)
91d231cb
JM
19777{
19778 if (TREE_CODE (*node) != FUNCTION_TYPE
19779 && TREE_CODE (*node) != FIELD_DECL
19780 && TREE_CODE (*node) != TYPE_DECL)
19781 {
5c498b10 19782 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
19783 IDENTIFIER_POINTER (name));
19784 *no_add_attrs = true;
19785 }
6a4cee5f 19786
91d231cb 19787 return NULL_TREE;
7509c759
MM
19788}
19789
a5c76ee6
ZW
19790/* Set longcall attributes on all functions declared when
19791 rs6000_default_long_calls is true. */
19792static void
a2369ed3 19793rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
19794{
19795 if (rs6000_default_long_calls
19796 && (TREE_CODE (type) == FUNCTION_TYPE
19797 || TREE_CODE (type) == METHOD_TYPE))
19798 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
19799 NULL_TREE,
19800 TYPE_ATTRIBUTES (type));
16d6f994
EC
19801
19802#if TARGET_MACHO
19803 darwin_set_default_type_attributes (type);
19804#endif
a5c76ee6
ZW
19805}
19806
3cb999d8
DE
19807/* Return a reference suitable for calling a function with the
19808 longcall attribute. */
a4f6c312 19809
9390387d 19810rtx
a2369ed3 19811rs6000_longcall_ref (rtx call_ref)
6a4cee5f 19812{
d330fd93 19813 const char *call_name;
6a4cee5f
MM
19814 tree node;
19815
19816 if (GET_CODE (call_ref) != SYMBOL_REF)
19817 return call_ref;
19818
19819 /* System V adds '.' to the internal name, so skip them. */
19820 call_name = XSTR (call_ref, 0);
19821 if (*call_name == '.')
19822 {
19823 while (*call_name == '.')
19824 call_name++;
19825
19826 node = get_identifier (call_name);
39403d82 19827 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
19828 }
19829
19830 return force_reg (Pmode, call_ref);
19831}
7509c759 19832\f
77ccdfed
EC
19833#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
19834#define TARGET_USE_MS_BITFIELD_LAYOUT 0
19835#endif
19836
19837/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
19838 struct attribute_spec.handler. */
19839static tree
19840rs6000_handle_struct_attribute (tree *node, tree name,
19841 tree args ATTRIBUTE_UNUSED,
19842 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
19843{
19844 tree *type = NULL;
19845 if (DECL_P (*node))
19846 {
19847 if (TREE_CODE (*node) == TYPE_DECL)
19848 type = &TREE_TYPE (*node);
19849 }
19850 else
19851 type = node;
19852
19853 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
19854 || TREE_CODE (*type) == UNION_TYPE)))
19855 {
19856 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
19857 *no_add_attrs = true;
19858 }
19859
19860 else if ((is_attribute_p ("ms_struct", name)
19861 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
19862 || ((is_attribute_p ("gcc_struct", name)
19863 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
19864 {
19865 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
19866 IDENTIFIER_POINTER (name));
19867 *no_add_attrs = true;
19868 }
19869
19870 return NULL_TREE;
19871}
19872
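/* Illustrative usage (not part of the original source): the attributes
   handled above are applied to record types, e.g.

     struct S { char c; int i : 5; } __attribute__ ((ms_struct));

   to request Microsoft-compatible bit-field layout for that particular
   struct, overriding the default chosen by TARGET_USE_MS_BITFIELD_LAYOUT
   (see rs6000_ms_bitfield_layout_p below).  */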
19873static bool
3101faab 19874rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
19875{
19876 return ((TARGET_USE_MS_BITFIELD_LAYOUT
19877 && !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
19878 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type)));
19879}
19880\f
b64a1b53
RH
19881#ifdef USING_ELFOS_H
19882
d6b5193b 19883/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 19884
d6b5193b
RS
19885static void
19886rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
19887{
19888 if (DEFAULT_ABI == ABI_AIX
19889 && TARGET_MINIMAL_TOC
19890 && !TARGET_RELOCATABLE)
19891 {
19892 if (!toc_initialized)
19893 {
19894 toc_initialized = 1;
19895 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19896 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
19897 fprintf (asm_out_file, "\t.tc ");
19898 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
19899 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19900 fprintf (asm_out_file, "\n");
19901
19902 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19903 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19904 fprintf (asm_out_file, " = .+32768\n");
19905 }
19906 else
19907 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19908 }
19909 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
19910 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19911 else
19912 {
19913 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19914 if (!toc_initialized)
19915 {
19916 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19917 fprintf (asm_out_file, " = .+32768\n");
19918 toc_initialized = 1;
19919 }
19920 }
19921}
19922
19923/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 19924
b64a1b53 19925static void
d6b5193b
RS
19926rs6000_elf_asm_init_sections (void)
19927{
19928 toc_section
19929 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
19930
19931 sdata2_section
19932 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
19933 SDATA2_SECTION_ASM_OP);
19934}
19935
19936/* Implement TARGET_SELECT_RTX_SECTION. */
19937
19938static section *
f676971a 19939rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 19940 unsigned HOST_WIDE_INT align)
7509c759 19941{
a9098fd0 19942 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 19943 return toc_section;
7509c759 19944 else
d6b5193b 19945 return default_elf_select_rtx_section (mode, x, align);
7509c759 19946}
d9407988 19947\f
d1908feb
JJ
19948/* For a SYMBOL_REF, set generic flags and then perform some
19949 target-specific processing.
19950
d1908feb
JJ
19951 When the AIX ABI is requested on a non-AIX system, replace the
19952 function name with the real name (with a leading .) rather than the
19953 function descriptor name. This saves a lot of overriding code to
19954 read the prefixes. */
d9407988 19955
fb49053f 19956static void
a2369ed3 19957rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 19958{
d1908feb 19959 default_encode_section_info (decl, rtl, first);
b2003250 19960
d1908feb
JJ
19961 if (first
19962 && TREE_CODE (decl) == FUNCTION_DECL
19963 && !TARGET_AIX
19964 && DEFAULT_ABI == ABI_AIX)
d9407988 19965 {
c6a2438a 19966 rtx sym_ref = XEXP (rtl, 0);
d1908feb
JJ
19967 size_t len = strlen (XSTR (sym_ref, 0));
19968 char *str = alloca (len + 2);
19969 str[0] = '.';
19970 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
19971 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 19972 }
d9407988
MM
19973}
19974
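/* Illustrative note (not from the original source): when the AIX ABI is
   selected on a non-AIX (ELF) target, a FUNCTION_DECL named "foo" has its
   SYMBOL_REF rewritten to ".foo" by the hook above, so later references
   name the code entry point directly rather than the "foo" function
   descriptor.  */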
21d9bb3f
PB
19975static inline bool
19976compare_section_name (const char *section, const char *template)
19977{
19978 int len;
19979
19980 len = strlen (template);
19981 return (strncmp (section, template, len) == 0
19982 && (section[len] == 0 || section[len] == '.'));
19983}
19984
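/* Illustrative examples (not part of the original source):
   compare_section_name (".sdata.foo", ".sdata") and
   compare_section_name (".sdata", ".sdata") both return true, while
   compare_section_name (".sdata2", ".sdata") returns false because the
   character after the matched prefix is '2' rather than '\0' or '.'.  */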
c1b7d95a 19985bool
3101faab 19986rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
19987{
19988 if (rs6000_sdata == SDATA_NONE)
19989 return false;
19990
7482ad25
AF
19991 /* We want to merge strings, so we never consider them small data. */
19992 if (TREE_CODE (decl) == STRING_CST)
19993 return false;
19994
19995 /* Functions are never in the small data area. */
19996 if (TREE_CODE (decl) == FUNCTION_DECL)
19997 return false;
19998
0e5dbd9b
DE
19999 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
20000 {
20001 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
20002 if (compare_section_name (section, ".sdata")
20003 || compare_section_name (section, ".sdata2")
20004 || compare_section_name (section, ".gnu.linkonce.s")
20005 || compare_section_name (section, ".sbss")
20006 || compare_section_name (section, ".sbss2")
20007 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
20008 || strcmp (section, ".PPC.EMB.sdata0") == 0
20009 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
20010 return true;
20011 }
20012 else
20013 {
20014 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
20015
20016 if (size > 0
307b599c 20017 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
20018 /* If it's not public, and we're not going to reference it there,
20019 there's no need to put it in the small data section. */
0e5dbd9b
DE
20020 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
20021 return true;
20022 }
20023
20024 return false;
20025}
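/* Illustrative note (an assumption, not from the original source): for a
   variable without an explicit section, the size test above implements the
   usual -G <num> behaviour; e.g. with -G 8 (g_switch_value == 8) a 4-byte
   "int x;" qualifies for small data while "int big[100];" does not.  */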
20026
b91da81f 20027#endif /* USING_ELFOS_H */
aacd3885
RS
20028\f
20029/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 20030
aacd3885 20031static bool
3101faab 20032rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
20033{
20034 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
20035}
a6c2a102 20036\f
000034eb 20037/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
20038 ADDR can be effectively incremented by incrementing REG.
20039
20040 r0 is special and we must not select it as an address
20041 register by this routine since our caller will try to
20042 increment the returned register via an "la" instruction. */
000034eb 20043
9390387d 20044rtx
a2369ed3 20045find_addr_reg (rtx addr)
000034eb
DE
20046{
20047 while (GET_CODE (addr) == PLUS)
20048 {
02441cd6
JL
20049 if (GET_CODE (XEXP (addr, 0)) == REG
20050 && REGNO (XEXP (addr, 0)) != 0)
000034eb 20051 addr = XEXP (addr, 0);
02441cd6
JL
20052 else if (GET_CODE (XEXP (addr, 1)) == REG
20053 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
20054 addr = XEXP (addr, 1);
20055 else if (CONSTANT_P (XEXP (addr, 0)))
20056 addr = XEXP (addr, 1);
20057 else if (CONSTANT_P (XEXP (addr, 1)))
20058 addr = XEXP (addr, 0);
20059 else
37409796 20060 gcc_unreachable ();
000034eb 20061 }
37409796
NS
20062 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
20063 return addr;
000034eb
DE
20064}
20065
a6c2a102 20066void
a2369ed3 20067rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
20068{
20069 fatal_insn ("bad address", op);
20070}
c8023011 20071
ee890fe2
SS
20072#if TARGET_MACHO
20073
efdba735 20074static tree branch_island_list = 0;
ee890fe2 20075
efdba735
SH
20076/* Remember to generate a branch island for far calls to the given
20077 function. */
ee890fe2 20078
f676971a 20079static void
c4ad648e
AM
20080add_compiler_branch_island (tree label_name, tree function_name,
20081 int line_number)
ee890fe2 20082{
efdba735 20083 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 20084 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
20085 TREE_CHAIN (branch_island) = branch_island_list;
20086 branch_island_list = branch_island;
ee890fe2
SS
20087}
20088
efdba735
SH
20089#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
20090#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
20091#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
20092 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 20093
efdba735
SH
20094/* Generate far-jump branch islands for everything on the
20095 branch_island_list. Invoked immediately after the last instruction
20096 of the epilogue has been emitted; the branch-islands must be
20097 appended to, and contiguous with, the function body. Mach-O stubs
20098 are generated in machopic_output_stub(). */
ee890fe2 20099
efdba735
SH
20100static void
20101macho_branch_islands (void)
20102{
20103 char tmp_buf[512];
20104 tree branch_island;
20105
20106 for (branch_island = branch_island_list;
20107 branch_island;
20108 branch_island = TREE_CHAIN (branch_island))
20109 {
20110 const char *label =
20111 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
20112 const char *name =
11abc112 20113 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
20114 char name_buf[512];
20115 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
20116 if (name[0] == '*' || name[0] == '&')
20117 strcpy (name_buf, name+1);
20118 else
20119 {
20120 name_buf[0] = '_';
20121 strcpy (name_buf+1, name);
20122 }
20123 strcpy (tmp_buf, "\n");
20124 strcat (tmp_buf, label);
ee890fe2 20125#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 20126 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 20127 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 20128#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
20129 if (flag_pic)
20130 {
20131 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
20132 strcat (tmp_buf, label);
20133 strcat (tmp_buf, "_pic\n");
20134 strcat (tmp_buf, label);
20135 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 20136
efdba735
SH
20137 strcat (tmp_buf, "\taddis r11,r11,ha16(");
20138 strcat (tmp_buf, name_buf);
20139 strcat (tmp_buf, " - ");
20140 strcat (tmp_buf, label);
20141 strcat (tmp_buf, "_pic)\n");
f676971a 20142
efdba735 20143 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 20144
efdba735
SH
20145 strcat (tmp_buf, "\taddi r12,r11,lo16(");
20146 strcat (tmp_buf, name_buf);
20147 strcat (tmp_buf, " - ");
20148 strcat (tmp_buf, label);
20149 strcat (tmp_buf, "_pic)\n");
f676971a 20150
efdba735
SH
20151 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
20152 }
20153 else
20154 {
20155 strcat (tmp_buf, ":\nlis r12,hi16(");
20156 strcat (tmp_buf, name_buf);
20157 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
20158 strcat (tmp_buf, name_buf);
20159 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
20160 }
20161 output_asm_insn (tmp_buf, 0);
ee890fe2 20162#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 20163 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 20164 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 20165#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 20166 }
ee890fe2 20167
efdba735 20168 branch_island_list = 0;
ee890fe2
SS
20169}
20170
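/* Illustrative note (not part of the original source): for a non-PIC
   island the loop above emits, for a target function _foo and island
   label L42, assembly of roughly this shape:

     L42:
       lis   r12,hi16(_foo)
       ori   r12,r12,lo16(_foo)
       mtctr r12
       bctr

   so the island materializes the full 32-bit address and jumps, and the
   original call site only needs a short "bl L42".  */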
20171/* NO_PREVIOUS_DEF checks in the linked list whether the function name is
20172 already there or not. */
20173
efdba735 20174static int
a2369ed3 20175no_previous_def (tree function_name)
ee890fe2 20176{
efdba735
SH
20177 tree branch_island;
20178 for (branch_island = branch_island_list;
20179 branch_island;
20180 branch_island = TREE_CHAIN (branch_island))
20181 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
20182 return 0;
20183 return 1;
20184}
20185
20186/* GET_PREV_LABEL gets the label name from the previous definition of
20187 the function. */
20188
efdba735 20189static tree
a2369ed3 20190get_prev_label (tree function_name)
ee890fe2 20191{
efdba735
SH
20192 tree branch_island;
20193 for (branch_island = branch_island_list;
20194 branch_island;
20195 branch_island = TREE_CHAIN (branch_island))
20196 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
20197 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
20198 return 0;
20199}
20200
75b1b789
MS
20201#ifndef DARWIN_LINKER_GENERATES_ISLANDS
20202#define DARWIN_LINKER_GENERATES_ISLANDS 0
20203#endif
20204
20205/* KEXTs still need branch islands. */
20206#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
20207 || flag_mkernel || flag_apple_kext)
20208
ee890fe2 20209/* INSN is either a function call or a millicode call. It may have an
f676971a 20210 unconditional jump in its delay slot.
ee890fe2
SS
20211
20212 CALL_DEST is the routine we are calling. */
20213
20214char *
c4ad648e
AM
20215output_call (rtx insn, rtx *operands, int dest_operand_number,
20216 int cookie_operand_number)
ee890fe2
SS
20217{
20218 static char buf[256];
75b1b789
MS
20219 if (DARWIN_GENERATE_ISLANDS
20220 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 20221 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
20222 {
20223 tree labelname;
efdba735 20224 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 20225
ee890fe2
SS
20226 if (no_previous_def (funname))
20227 {
ee890fe2
SS
20228 rtx label_rtx = gen_label_rtx ();
20229 char *label_buf, temp_buf[256];
20230 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
20231 CODE_LABEL_NUMBER (label_rtx));
20232 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
20233 labelname = get_identifier (label_buf);
a38e7aa5 20234 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
20235 }
20236 else
20237 labelname = get_prev_label (funname);
20238
efdba735
SH
20239 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
20240 instruction will reach 'foo', otherwise link as 'bl L42'".
20241 "L42" should be a 'branch island', that will do a far jump to
20242 'foo'. Branch islands are generated in
20243 macho_branch_islands(). */
ee890fe2 20244 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 20245 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
20246 }
20247 else
efdba735
SH
20248 sprintf (buf, "bl %%z%d", dest_operand_number);
20249 return buf;
ee890fe2
SS
20250}
20251
ee890fe2
SS
20252/* Generate PIC and indirect symbol stubs. */
20253
20254void
a2369ed3 20255machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
20256{
20257 unsigned int length;
a4f6c312
SS
20258 char *symbol_name, *lazy_ptr_name;
20259 char *local_label_0;
ee890fe2
SS
20260 static int label = 0;
20261
df56a27f 20262 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 20263 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 20264
ee890fe2 20265
ee890fe2
SS
20266 length = strlen (symb);
20267 symbol_name = alloca (length + 32);
20268 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
20269
20270 lazy_ptr_name = alloca (length + 32);
20271 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
20272
ee890fe2 20273 if (flag_pic == 2)
56c779bc 20274 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 20275 else
56c779bc 20276 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
20277
20278 if (flag_pic == 2)
20279 {
d974312d
DJ
20280 fprintf (file, "\t.align 5\n");
20281
20282 fprintf (file, "%s:\n", stub);
20283 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20284
876455fa 20285 label++;
89da1f32 20286 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
876455fa 20287 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 20288
ee890fe2
SS
20289 fprintf (file, "\tmflr r0\n");
20290 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
20291 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
20292 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
20293 lazy_ptr_name, local_label_0);
20294 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
20295 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
20296 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
20297 lazy_ptr_name, local_label_0);
20298 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
20299 fprintf (file, "\tbctr\n");
20300 }
20301 else
d974312d
DJ
20302 {
20303 fprintf (file, "\t.align 4\n");
20304
20305 fprintf (file, "%s:\n", stub);
20306 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20307
20308 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
20309 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
20310 (TARGET_64BIT ? "ldu" : "lwzu"),
20311 lazy_ptr_name);
d974312d
DJ
20312 fprintf (file, "\tmtctr r12\n");
20313 fprintf (file, "\tbctr\n");
20314 }
f676971a 20315
56c779bc 20316 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
20317 fprintf (file, "%s:\n", lazy_ptr_name);
20318 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
20319 fprintf (file, "%sdyld_stub_binding_helper\n",
20320 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
20321}
20322
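/* Illustrative note (not part of the original source): for flag_pic == 2
   the stub emitted above locates the lazy pointer PC-relatively
   (mflr/bcl/mflr, addis ...ha16(...), lwzu or ldu, mtctr, bctr); the lazy
   pointer itself initially contains dyld_stub_binding_helper and is
   patched by dyld with the symbol's real address on first use.  */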
20323/* Legitimize PIC addresses. If the address is already
20324 position-independent, we return ORIG. Newly generated
20325 position-independent addresses go into a reg. This is REG if non
20326 zero, otherwise we allocate register(s) as necessary. */
20327
4fbbe694 20328#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
20329
20330rtx
f676971a 20331rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 20332 rtx reg)
ee890fe2
SS
20333{
20334 rtx base, offset;
20335
20336 if (reg == NULL && ! reload_in_progress && ! reload_completed)
20337 reg = gen_reg_rtx (Pmode);
20338
20339 if (GET_CODE (orig) == CONST)
20340 {
37409796
NS
20341 rtx reg_temp;
20342
ee890fe2
SS
20343 if (GET_CODE (XEXP (orig, 0)) == PLUS
20344 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
20345 return orig;
20346
37409796 20347 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 20348
37409796
NS
20349 /* Use a different reg for the intermediate value, as
20350 it will be marked UNCHANGING. */
b3a13419 20351 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
20352 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
20353 Pmode, reg_temp);
20354 offset =
20355 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
20356 Pmode, reg);
bb8df8a6 20357
ee890fe2
SS
20358 if (GET_CODE (offset) == CONST_INT)
20359 {
20360 if (SMALL_INT (offset))
ed8908e7 20361 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
20362 else if (! reload_in_progress && ! reload_completed)
20363 offset = force_reg (Pmode, offset);
20364 else
c859cda6
DJ
20365 {
20366 rtx mem = force_const_mem (Pmode, orig);
20367 return machopic_legitimize_pic_address (mem, Pmode, reg);
20368 }
ee890fe2 20369 }
f1c25d3b 20370 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
20371 }
20372
20373 /* Fall back on generic machopic code. */
20374 return machopic_legitimize_pic_address (orig, mode, reg);
20375}
20376
c4e18b1c
GK
20377/* Output a .machine directive for the Darwin assembler, and call
20378 the generic start_file routine. */
20379
20380static void
20381rs6000_darwin_file_start (void)
20382{
94ff898d 20383 static const struct
c4e18b1c
GK
20384 {
20385 const char *arg;
20386 const char *name;
20387 int if_set;
20388 } mapping[] = {
55dbfb48 20389 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
20390 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
20391 { "power4", "ppc970", 0 },
20392 { "G5", "ppc970", 0 },
20393 { "7450", "ppc7450", 0 },
20394 { "7400", "ppc7400", MASK_ALTIVEC },
20395 { "G4", "ppc7400", 0 },
20396 { "750", "ppc750", 0 },
20397 { "740", "ppc750", 0 },
20398 { "G3", "ppc750", 0 },
20399 { "604e", "ppc604e", 0 },
20400 { "604", "ppc604", 0 },
20401 { "603e", "ppc603", 0 },
20402 { "603", "ppc603", 0 },
20403 { "601", "ppc601", 0 },
20404 { NULL, "ppc", 0 } };
20405 const char *cpu_id = "";
20406 size_t i;
94ff898d 20407
9390387d 20408 rs6000_file_start ();
192d0f89 20409 darwin_file_start ();
c4e18b1c
GK
20410
20411 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
20412 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
20413 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
20414 && rs6000_select[i].string[0] != '\0')
20415 cpu_id = rs6000_select[i].string;
20416
20417 /* Look through the mapping array. Pick the first name that either
20418 matches the argument, has a bit set in IF_SET that is also set
20419 in the target flags, or has a NULL name. */
20420
20421 i = 0;
20422 while (mapping[i].arg != NULL
20423 && strcmp (mapping[i].arg, cpu_id) != 0
20424 && (mapping[i].if_set & target_flags) == 0)
20425 i++;
20426
20427 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
20428}
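/* Illustrative examples (assumptions, not from the original source): with
   -mcpu=G5 the loop above stops at an entry whose name is "ppc970" and
   emits "\t.machine ppc970"; with -mcpu=7400, or whenever MASK_ALTIVEC is
   set, it emits "\t.machine ppc7400"; with no match at all it falls
   through to the final entry and emits "\t.machine ppc".  */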
20429
ee890fe2 20430#endif /* TARGET_MACHO */
7c262518
RH
20431
20432#if TARGET_ELF
9b580a0b
RH
20433static int
20434rs6000_elf_reloc_rw_mask (void)
7c262518 20435{
9b580a0b
RH
20436 if (flag_pic)
20437 return 3;
20438 else if (DEFAULT_ABI == ABI_AIX)
20439 return 2;
20440 else
20441 return 0;
7c262518 20442}
d9f6800d
RH
20443
20444/* Record an element in the table of global constructors. SYMBOL is
20445 a SYMBOL_REF of the function to be called; PRIORITY is a number
20446 between 0 and MAX_INIT_PRIORITY.
20447
20448 This differs from default_named_section_asm_out_constructor in
20449 that we have special handling for -mrelocatable. */
20450
20451static void
a2369ed3 20452rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
20453{
20454 const char *section = ".ctors";
20455 char buf[16];
20456
20457 if (priority != DEFAULT_INIT_PRIORITY)
20458 {
20459 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
20460 /* Invert the numbering so the linker puts us in the proper
20461 order; constructors are run from right to left, and the
20462 linker sorts in increasing order. */
20463 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20464 section = buf;
20465 }
20466
d6b5193b 20467 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20468 assemble_align (POINTER_SIZE);
d9f6800d
RH
20469
20470 if (TARGET_RELOCATABLE)
20471 {
20472 fputs ("\t.long (", asm_out_file);
20473 output_addr_const (asm_out_file, symbol);
20474 fputs (")@fixup\n", asm_out_file);
20475 }
20476 else
c8af3574 20477 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
20478}
20479
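/* Illustrative example (not part of the original source): a constructor
   with priority 65500 is placed in section ".ctors.00035" above, since
   MAX_INIT_PRIORITY (65535) minus 65500 is 35 and the %.5u format pads it
   to five digits; the linker's increasing sort of these section names
   then produces the required right-to-left execution order.  */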
20480static void
a2369ed3 20481rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
20482{
20483 const char *section = ".dtors";
20484 char buf[16];
20485
20486 if (priority != DEFAULT_INIT_PRIORITY)
20487 {
20488 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
20489 /* Invert the numbering so the linker puts us in the proper
20490 order; constructors are run from right to left, and the
20491 linker sorts in increasing order. */
20492 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20493 section = buf;
20494 }
20495
d6b5193b 20496 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20497 assemble_align (POINTER_SIZE);
d9f6800d
RH
20498
20499 if (TARGET_RELOCATABLE)
20500 {
20501 fputs ("\t.long (", asm_out_file);
20502 output_addr_const (asm_out_file, symbol);
20503 fputs (")@fixup\n", asm_out_file);
20504 }
20505 else
c8af3574 20506 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 20507}
9739c90c
JJ
20508
20509void
a2369ed3 20510rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
20511{
20512 if (TARGET_64BIT)
20513 {
20514 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
20515 ASM_OUTPUT_LABEL (file, name);
20516 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
20517 rs6000_output_function_entry (file, name);
20518 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
20519 if (DOT_SYMBOLS)
9739c90c 20520 {
85b776df 20521 fputs ("\t.size\t", file);
9739c90c 20522 assemble_name (file, name);
85b776df
AM
20523 fputs (",24\n\t.type\t.", file);
20524 assemble_name (file, name);
20525 fputs (",@function\n", file);
20526 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
20527 {
20528 fputs ("\t.globl\t.", file);
20529 assemble_name (file, name);
20530 putc ('\n', file);
20531 }
9739c90c 20532 }
85b776df
AM
20533 else
20534 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 20535 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
20536 rs6000_output_function_entry (file, name);
20537 fputs (":\n", file);
9739c90c
JJ
20538 return;
20539 }
20540
20541 if (TARGET_RELOCATABLE
7f970b70 20542 && !TARGET_SECURE_PLT
9739c90c 20543 && (get_pool_size () != 0 || current_function_profile)
3c9eb5f4 20544 && uses_TOC ())
9739c90c
JJ
20545 {
20546 char buf[256];
20547
20548 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
20549
20550 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
20551 fprintf (file, "\t.long ");
20552 assemble_name (file, buf);
20553 putc ('-', file);
20554 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
20555 assemble_name (file, buf);
20556 putc ('\n', file);
20557 }
20558
20559 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
20560 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
20561
20562 if (DEFAULT_ABI == ABI_AIX)
20563 {
20564 const char *desc_name, *orig_name;
20565
20566 orig_name = (*targetm.strip_name_encoding) (name);
20567 desc_name = orig_name;
20568 while (*desc_name == '.')
20569 desc_name++;
20570
20571 if (TREE_PUBLIC (decl))
20572 fprintf (file, "\t.globl %s\n", desc_name);
20573
20574 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20575 fprintf (file, "%s:\n", desc_name);
20576 fprintf (file, "\t.long %s\n", orig_name);
20577 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
20578 if (DEFAULT_ABI == ABI_AIX)
20579 fputs ("\t.long 0\n", file);
20580 fprintf (file, "\t.previous\n");
20581 }
20582 ASM_OUTPUT_LABEL (file, name);
20583}
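/* Illustrative note (an assumption, not from the original source): on
   64-bit ELF the code above emits, for a public function "foo" with
   dot-symbols enabled, roughly

     .section ".opd","aw"
     .align 3
   foo:
     .quad .foo,.TOC.@tocbase,0
     .previous
     .size foo,24
     .type .foo,@function
     .globl .foo
   .foo:

   so "foo" names the three-doubleword function descriptor while ".foo"
   labels the actual code entry point.  */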
1334b570
AM
20584
20585static void
20586rs6000_elf_end_indicate_exec_stack (void)
20587{
20588 if (TARGET_32BIT)
20589 file_end_indicate_exec_stack ();
20590}
7c262518
RH
20591#endif
20592
cbaaba19 20593#if TARGET_XCOFF
0d5817b2
DE
20594static void
20595rs6000_xcoff_asm_output_anchor (rtx symbol)
20596{
20597 char buffer[100];
20598
20599 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
20600 SYMBOL_REF_BLOCK_OFFSET (symbol));
20601 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
20602}
20603
7c262518 20604static void
a2369ed3 20605rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
20606{
20607 fputs (GLOBAL_ASM_OP, stream);
20608 RS6000_OUTPUT_BASENAME (stream, name);
20609 putc ('\n', stream);
20610}
20611
d6b5193b
RS
20612/* A get_unnamed_decl callback, used for read-only sections. PTR
20613 points to the section string variable. */
20614
20615static void
20616rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
20617{
890f9edf
OH
20618 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
20619 *(const char *const *) directive,
20620 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20621}
20622
20623/* Likewise for read-write sections. */
20624
20625static void
20626rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
20627{
890f9edf
OH
20628 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
20629 *(const char *const *) directive,
20630 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20631}
20632
20633/* A get_unnamed_section callback, used for switching to toc_section. */
20634
20635static void
20636rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20637{
20638 if (TARGET_MINIMAL_TOC)
20639 {
20640 /* toc_section is always selected at least once from
20641 rs6000_xcoff_file_start, so this is guaranteed to
20642 always be defined once and only once in each file. */
20643 if (!toc_initialized)
20644 {
20645 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
20646 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
20647 toc_initialized = 1;
20648 }
20649 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
20650 (TARGET_32BIT ? "" : ",3"));
20651 }
20652 else
20653 fputs ("\t.toc\n", asm_out_file);
20654}
20655
20656/* Implement TARGET_ASM_INIT_SECTIONS. */
20657
20658static void
20659rs6000_xcoff_asm_init_sections (void)
20660{
20661 read_only_data_section
20662 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20663 &xcoff_read_only_section_name);
20664
20665 private_data_section
20666 = get_unnamed_section (SECTION_WRITE,
20667 rs6000_xcoff_output_readwrite_section_asm_op,
20668 &xcoff_private_data_section_name);
20669
20670 read_only_private_data_section
20671 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20672 &xcoff_private_data_section_name);
20673
20674 toc_section
20675 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
20676
20677 readonly_data_section = read_only_data_section;
20678 exception_section = data_section;
20679}
20680
9b580a0b
RH
20681static int
20682rs6000_xcoff_reloc_rw_mask (void)
20683{
20684 return 3;
20685}
20686
b275d088 20687static void
c18a5b6c
MM
20688rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
20689 tree decl ATTRIBUTE_UNUSED)
7c262518 20690{
0e5dbd9b
DE
20691 int smclass;
20692 static const char * const suffix[3] = { "PR", "RO", "RW" };
20693
20694 if (flags & SECTION_CODE)
20695 smclass = 0;
20696 else if (flags & SECTION_WRITE)
20697 smclass = 2;
20698 else
20699 smclass = 1;
20700
5b5198f7 20701 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 20702 (flags & SECTION_CODE) ? "." : "",
5b5198f7 20703 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 20704}
ae46c4e0 20705
d6b5193b 20706static section *
f676971a 20707rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 20708 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 20709{
9b580a0b 20710 if (decl_readonly_section (decl, reloc))
ae46c4e0 20711 {
0e5dbd9b 20712 if (TREE_PUBLIC (decl))
d6b5193b 20713 return read_only_data_section;
ae46c4e0 20714 else
d6b5193b 20715 return read_only_private_data_section;
ae46c4e0
RH
20716 }
20717 else
20718 {
0e5dbd9b 20719 if (TREE_PUBLIC (decl))
d6b5193b 20720 return data_section;
ae46c4e0 20721 else
d6b5193b 20722 return private_data_section;
ae46c4e0
RH
20723 }
20724}
20725
20726static void
a2369ed3 20727rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
20728{
20729 const char *name;
ae46c4e0 20730
5b5198f7
DE
20731 /* Use select_section for private and uninitialized data. */
20732 if (!TREE_PUBLIC (decl)
20733 || DECL_COMMON (decl)
0e5dbd9b
DE
20734 || DECL_INITIAL (decl) == NULL_TREE
20735 || DECL_INITIAL (decl) == error_mark_node
20736 || (flag_zero_initialized_in_bss
20737 && initializer_zerop (DECL_INITIAL (decl))))
20738 return;
20739
20740 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
20741 name = (*targetm.strip_name_encoding) (name);
20742 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 20743}
b64a1b53 20744
fb49053f
RH
20745/* Select section for constant in constant pool.
20746
20747 On RS/6000, all constants are in the private read-only data area.
20748 However, if this is being placed in the TOC it must be output as a
20749 toc entry. */
20750
d6b5193b 20751static section *
f676971a 20752rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 20753 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
20754{
20755 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20756 return toc_section;
b64a1b53 20757 else
d6b5193b 20758 return read_only_private_data_section;
b64a1b53 20759}
772c5265
RH
20760
20761/* Remove any trailing [DS] or the like from the symbol name. */
20762
20763static const char *
a2369ed3 20764rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
20765{
20766 size_t len;
20767 if (*name == '*')
20768 name++;
20769 len = strlen (name);
20770 if (name[len - 1] == ']')
20771 return ggc_alloc_string (name, len - 4);
20772 else
20773 return name;
20774}
20775
5add3202
DE
20776/* Section attributes. AIX is always PIC. */
20777
20778static unsigned int
a2369ed3 20779rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 20780{
5b5198f7 20781 unsigned int align;
9b580a0b 20782 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
20783
20784 /* Align to at least UNIT size. */
20785 if (flags & SECTION_CODE)
20786 align = MIN_UNITS_PER_WORD;
20787 else
20788 /* Increase alignment of large objects if not already stricter. */
20789 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
20790 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
20791 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
20792
20793 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 20794}
a5fe455b 20795
1bc7c5b6
ZW
20796/* Output at beginning of assembler file.
20797
20798 Initialize the section names for the RS/6000 at this point.
20799
20800 Specify filename, including full path, to assembler.
20801
20802 We want to go into the TOC section so at least one .toc will be emitted.
20803 Also, in order to output proper .bs/.es pairs, we need at least one static
20804 [RW] section emitted.
20805
20806 Finally, declare mcount when profiling to make the assembler happy. */
20807
20808static void
863d938c 20809rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
20810{
20811 rs6000_gen_section_name (&xcoff_bss_section_name,
20812 main_input_filename, ".bss_");
20813 rs6000_gen_section_name (&xcoff_private_data_section_name,
20814 main_input_filename, ".rw_");
20815 rs6000_gen_section_name (&xcoff_read_only_section_name,
20816 main_input_filename, ".ro_");
20817
20818 fputs ("\t.file\t", asm_out_file);
20819 output_quoted_string (asm_out_file, main_input_filename);
20820 fputc ('\n', asm_out_file);
1bc7c5b6 20821 if (write_symbols != NO_DEBUG)
d6b5193b
RS
20822 switch_to_section (private_data_section);
20823 switch_to_section (text_section);
1bc7c5b6
ZW
20824 if (profile_flag)
20825 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
20826 rs6000_file_start ();
20827}
20828
a5fe455b
ZW
20829/* Output at end of assembler file.
20830 On the RS/6000, referencing data should automatically pull in text. */
20831
20832static void
863d938c 20833rs6000_xcoff_file_end (void)
a5fe455b 20834{
d6b5193b 20835 switch_to_section (text_section);
a5fe455b 20836 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 20837 switch_to_section (data_section);
a5fe455b
ZW
20838 fputs (TARGET_32BIT
20839 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
20840 asm_out_file);
20841}
f1384257 20842#endif /* TARGET_XCOFF */
0e5dbd9b 20843
3c50106f
RH
20844/* Compute a (partial) cost for rtx X. Return true if the complete
20845 cost has been computed, and false if subexpressions should be
20846 scanned. In either case, *TOTAL contains the cost result. */
20847
20848static bool
1494c534 20849rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 20850{
f0517163
RS
20851 enum machine_mode mode = GET_MODE (x);
20852
3c50106f
RH
20853 switch (code)
20854 {
30a555d9 20855 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 20856 case CONST_INT:
066cd967
DE
20857 if (((outer_code == SET
20858 || outer_code == PLUS
20859 || outer_code == MINUS)
279bb624
DE
20860 && (satisfies_constraint_I (x)
20861 || satisfies_constraint_L (x)))
066cd967 20862 || (outer_code == AND
279bb624
DE
20863 && (satisfies_constraint_K (x)
20864 || (mode == SImode
20865 ? satisfies_constraint_L (x)
20866 : satisfies_constraint_J (x))
1990cd79
AM
20867 || mask_operand (x, mode)
20868 || (mode == DImode
20869 && mask64_operand (x, DImode))))
22e54023 20870 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
20871 && (satisfies_constraint_K (x)
20872 || (mode == SImode
20873 ? satisfies_constraint_L (x)
20874 : satisfies_constraint_J (x))))
066cd967
DE
20875 || outer_code == ASHIFT
20876 || outer_code == ASHIFTRT
20877 || outer_code == LSHIFTRT
20878 || outer_code == ROTATE
20879 || outer_code == ROTATERT
d5861a7a 20880 || outer_code == ZERO_EXTRACT
066cd967 20881 || (outer_code == MULT
279bb624 20882 && satisfies_constraint_I (x))
22e54023
DE
20883 || ((outer_code == DIV || outer_code == UDIV
20884 || outer_code == MOD || outer_code == UMOD)
20885 && exact_log2 (INTVAL (x)) >= 0)
066cd967 20886 || (outer_code == COMPARE
279bb624
DE
20887 && (satisfies_constraint_I (x)
20888 || satisfies_constraint_K (x)))
22e54023 20889 || (outer_code == EQ
279bb624
DE
20890 && (satisfies_constraint_I (x)
20891 || satisfies_constraint_K (x)
20892 || (mode == SImode
20893 ? satisfies_constraint_L (x)
20894 : satisfies_constraint_J (x))))
22e54023 20895 || (outer_code == GTU
279bb624 20896 && satisfies_constraint_I (x))
22e54023 20897 || (outer_code == LTU
279bb624 20898 && satisfies_constraint_P (x)))
066cd967
DE
20899 {
20900 *total = 0;
20901 return true;
20902 }
20903 else if ((outer_code == PLUS
4ae234b0 20904 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 20905 || (outer_code == MINUS
4ae234b0 20906 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
20907 || ((outer_code == SET
20908 || outer_code == IOR
20909 || outer_code == XOR)
20910 && (INTVAL (x)
20911 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
20912 {
20913 *total = COSTS_N_INSNS (1);
20914 return true;
20915 }
20916 /* FALLTHRU */
20917
20918 case CONST_DOUBLE:
f6fe3a22 20919 if (mode == DImode && code == CONST_DOUBLE)
066cd967 20920 {
f6fe3a22
DE
20921 if ((outer_code == IOR || outer_code == XOR)
20922 && CONST_DOUBLE_HIGH (x) == 0
20923 && (CONST_DOUBLE_LOW (x)
20924 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
20925 {
20926 *total = 0;
20927 return true;
20928 }
20929 else if ((outer_code == AND && and64_2_operand (x, DImode))
20930 || ((outer_code == SET
20931 || outer_code == IOR
20932 || outer_code == XOR)
20933 && CONST_DOUBLE_HIGH (x) == 0))
20934 {
20935 *total = COSTS_N_INSNS (1);
20936 return true;
20937 }
066cd967
DE
20938 }
20939 /* FALLTHRU */
20940
3c50106f 20941 case CONST:
066cd967 20942 case HIGH:
3c50106f 20943 case SYMBOL_REF:
066cd967
DE
20944 case MEM:
20945 /* When optimizing for size, MEM should be slightly more expensive
20946 than generating address, e.g., (plus (reg) (const)).
c112cf2b 20947 L1 cache latency is about two instructions. */
066cd967 20948 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
20949 return true;
20950
30a555d9
DE
20951 case LABEL_REF:
20952 *total = 0;
20953 return true;
20954
3c50106f 20955 case PLUS:
f0517163 20956 if (mode == DFmode)
066cd967
DE
20957 {
20958 if (GET_CODE (XEXP (x, 0)) == MULT)
20959 {
20960 /* FNMA accounted in outer NEG. */
20961 if (outer_code == NEG)
20962 *total = rs6000_cost->dmul - rs6000_cost->fp;
20963 else
20964 *total = rs6000_cost->dmul;
20965 }
20966 else
20967 *total = rs6000_cost->fp;
20968 }
f0517163 20969 else if (mode == SFmode)
066cd967
DE
20970 {
20971 /* FNMA accounted in outer NEG. */
20972 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20973 *total = 0;
20974 else
20975 *total = rs6000_cost->fp;
20976 }
f0517163 20977 else
066cd967
DE
20978 *total = COSTS_N_INSNS (1);
20979 return false;
3c50106f 20980
52190329 20981 case MINUS:
f0517163 20982 if (mode == DFmode)
066cd967 20983 {
762c919f
JM
20984 if (GET_CODE (XEXP (x, 0)) == MULT
20985 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
20986 {
20987 /* FNMA accounted in outer NEG. */
20988 if (outer_code == NEG)
762c919f 20989 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
20990 else
20991 *total = rs6000_cost->dmul;
20992 }
20993 else
20994 *total = rs6000_cost->fp;
20995 }
f0517163 20996 else if (mode == SFmode)
066cd967
DE
20997 {
20998 /* FNMA accounted in outer NEG. */
20999 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21000 *total = 0;
21001 else
21002 *total = rs6000_cost->fp;
21003 }
f0517163 21004 else
c4ad648e 21005 *total = COSTS_N_INSNS (1);
066cd967 21006 return false;
3c50106f
RH
21007
21008 case MULT:
c9dbf840 21009 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 21010 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 21011 {
8b897cfa
RS
21012 if (INTVAL (XEXP (x, 1)) >= -256
21013 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 21014 *total = rs6000_cost->mulsi_const9;
8b897cfa 21015 else
06a67bdd 21016 *total = rs6000_cost->mulsi_const;
3c50106f 21017 }
066cd967
DE
21018 /* FMA accounted in outer PLUS/MINUS. */
21019 else if ((mode == DFmode || mode == SFmode)
21020 && (outer_code == PLUS || outer_code == MINUS))
21021 *total = 0;
f0517163 21022 else if (mode == DFmode)
06a67bdd 21023 *total = rs6000_cost->dmul;
f0517163 21024 else if (mode == SFmode)
06a67bdd 21025 *total = rs6000_cost->fp;
f0517163 21026 else if (mode == DImode)
06a67bdd 21027 *total = rs6000_cost->muldi;
8b897cfa 21028 else
06a67bdd 21029 *total = rs6000_cost->mulsi;
066cd967 21030 return false;
3c50106f
RH
21031
21032 case DIV:
21033 case MOD:
f0517163
RS
21034 if (FLOAT_MODE_P (mode))
21035 {
06a67bdd
RS
21036 *total = mode == DFmode ? rs6000_cost->ddiv
21037 : rs6000_cost->sdiv;
066cd967 21038 return false;
f0517163 21039 }
5efb1046 21040 /* FALLTHRU */
3c50106f
RH
21041
21042 case UDIV:
21043 case UMOD:
627b6fe2
DJ
21044 if (GET_CODE (XEXP (x, 1)) == CONST_INT
21045 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
21046 {
21047 if (code == DIV || code == MOD)
21048 /* Shift, addze */
21049 *total = COSTS_N_INSNS (2);
21050 else
21051 /* Shift */
21052 *total = COSTS_N_INSNS (1);
21053 }
c4ad648e 21054 else
627b6fe2
DJ
21055 {
21056 if (GET_MODE (XEXP (x, 1)) == DImode)
21057 *total = rs6000_cost->divdi;
21058 else
21059 *total = rs6000_cost->divsi;
21060 }
21061 /* Add in shift and subtract for MOD. */
21062 if (code == MOD || code == UMOD)
21063 *total += COSTS_N_INSNS (2);
066cd967 21064 return false;
3c50106f 21065
32f56aad 21066 case CTZ:
3c50106f
RH
21067 case FFS:
21068 *total = COSTS_N_INSNS (4);
066cd967 21069 return false;
3c50106f 21070
32f56aad
DE
21071 case POPCOUNT:
21072 *total = COSTS_N_INSNS (6);
21073 return false;
21074
06a67bdd 21075 case NOT:
066cd967
DE
21076 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
21077 {
21078 *total = 0;
21079 return false;
21080 }
21081 /* FALLTHRU */
21082
21083 case AND:
32f56aad 21084 case CLZ:
066cd967
DE
21085 case IOR:
21086 case XOR:
d5861a7a
DE
21087 case ZERO_EXTRACT:
21088 *total = COSTS_N_INSNS (1);
21089 return false;
21090
066cd967
DE
21091 case ASHIFT:
21092 case ASHIFTRT:
21093 case LSHIFTRT:
21094 case ROTATE:
21095 case ROTATERT:
d5861a7a 21096 /* Handle mul_highpart. */
066cd967
DE
21097 if (outer_code == TRUNCATE
21098 && GET_CODE (XEXP (x, 0)) == MULT)
21099 {
21100 if (mode == DImode)
21101 *total = rs6000_cost->muldi;
21102 else
21103 *total = rs6000_cost->mulsi;
21104 return true;
21105 }
d5861a7a
DE
21106 else if (outer_code == AND)
21107 *total = 0;
21108 else
21109 *total = COSTS_N_INSNS (1);
21110 return false;
21111
21112 case SIGN_EXTEND:
21113 case ZERO_EXTEND:
21114 if (GET_CODE (XEXP (x, 0)) == MEM)
21115 *total = 0;
21116 else
21117 *total = COSTS_N_INSNS (1);
066cd967 21118 return false;
06a67bdd 21119
066cd967
DE
21120 case COMPARE:
21121 case NEG:
21122 case ABS:
21123 if (!FLOAT_MODE_P (mode))
21124 {
21125 *total = COSTS_N_INSNS (1);
21126 return false;
21127 }
21128 /* FALLTHRU */
21129
21130 case FLOAT:
21131 case UNSIGNED_FLOAT:
21132 case FIX:
21133 case UNSIGNED_FIX:
06a67bdd
RS
21134 case FLOAT_TRUNCATE:
21135 *total = rs6000_cost->fp;
066cd967 21136 return false;
06a67bdd 21137
a2af5043
DJ
21138 case FLOAT_EXTEND:
21139 if (mode == DFmode)
21140 *total = 0;
21141 else
21142 *total = rs6000_cost->fp;
21143 return false;
21144
06a67bdd
RS
21145 case UNSPEC:
21146 switch (XINT (x, 1))
21147 {
21148 case UNSPEC_FRSP:
21149 *total = rs6000_cost->fp;
21150 return true;
21151
21152 default:
21153 break;
21154 }
21155 break;
21156
21157 case CALL:
21158 case IF_THEN_ELSE:
21159 if (optimize_size)
21160 {
21161 *total = COSTS_N_INSNS (1);
21162 return true;
21163 }
066cd967
DE
21164 else if (FLOAT_MODE_P (mode)
21165 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
21166 {
21167 *total = rs6000_cost->fp;
21168 return false;
21169 }
06a67bdd
RS
21170 break;
21171
c0600ecd
DE
21172 case EQ:
21173 case GTU:
21174 case LTU:
22e54023
DE
21175 /* Carry bit requires mode == Pmode.
21176 NEG or PLUS already counted so only add one. */
21177 if (mode == Pmode
21178 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 21179 {
22e54023
DE
21180 *total = COSTS_N_INSNS (1);
21181 return true;
21182 }
21183 if (outer_code == SET)
21184 {
21185 if (XEXP (x, 1) == const0_rtx)
c0600ecd 21186 {
22e54023 21187 *total = COSTS_N_INSNS (2);
c0600ecd 21188 return true;
c0600ecd 21189 }
22e54023
DE
21190 else if (mode == Pmode)
21191 {
21192 *total = COSTS_N_INSNS (3);
21193 return false;
21194 }
21195 }
21196 /* FALLTHRU */
21197
21198 case GT:
21199 case LT:
21200 case UNORDERED:
21201 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
21202 {
21203 *total = COSTS_N_INSNS (2);
21204 return true;
c0600ecd 21205 }
22e54023
DE
21206 /* CC COMPARE. */
21207 if (outer_code == COMPARE)
21208 {
21209 *total = 0;
21210 return true;
21211 }
21212 break;
c0600ecd 21213
3c50106f 21214 default:
06a67bdd 21215 break;
3c50106f 21216 }
06a67bdd
RS
21217
21218 return false;
3c50106f
RH
21219}
21220
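/* Illustrative example (not part of the original source): in
   (set (reg:SI 3) (const_int 5)) the constant satisfies constraint "I"
   (a signed 16-bit immediate), so the CONST_INT case above reports a cost
   of 0; a constant such as 0x12345678 in the same context fails both "I"
   and "L" but has no bits above bit 31, so it instead falls through to
   the COSTS_N_INSNS (1) branch.  */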
34bb030a
DE
21221/* A C expression returning the cost of moving data from a register of class
21222 CLASS1 to one of CLASS2. */
21223
21224int
f676971a 21225rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 21226 enum reg_class from, enum reg_class to)
34bb030a
DE
21227{
21228 /* Moves from/to GENERAL_REGS. */
21229 if (reg_classes_intersect_p (to, GENERAL_REGS)
21230 || reg_classes_intersect_p (from, GENERAL_REGS))
21231 {
21232 if (! reg_classes_intersect_p (to, GENERAL_REGS))
21233 from = to;
21234
21235 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
21236 return (rs6000_memory_move_cost (mode, from, 0)
21237 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
21238
c4ad648e
AM
21239 /* It's more expensive to move CR_REGS than CR0_REGS because of the
21240 shift. */
34bb030a
DE
21241 else if (from == CR_REGS)
21242 return 4;
21243
21244 else
c4ad648e 21245 /* A move will cost one instruction per GPR moved. */
c8b622ff 21246 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
21247 }
21248
c4ad648e 21249 /* Moving between two similar registers is just one instruction. */
34bb030a 21250 else if (reg_classes_intersect_p (to, from))
7393f7f8 21251 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 21252
c4ad648e 21253 /* Everything else has to go through GENERAL_REGS. */
34bb030a 21254 else
f676971a 21255 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
21256 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
21257}
21258
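/* Illustrative examples (not part of the original source): by the code
   above, an SImode move between two GPRs, or a DFmode move between two
   FPRs, costs 2; a GPR<->FPR move is charged as a store plus a load via
   rs6000_memory_move_cost; and a move out of CR_REGS costs 4 because of
   the extra work to extract the field.  */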
21259/* A C expression returning the cost of moving data of MODE from a register to
21260 or from memory. */
21261
21262int
f676971a 21263rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
a2369ed3 21264 int in ATTRIBUTE_UNUSED)
34bb030a
DE
21265{
21266 if (reg_classes_intersect_p (class, GENERAL_REGS))
c8b622ff 21267 return 4 * hard_regno_nregs[0][mode];
34bb030a 21268 else if (reg_classes_intersect_p (class, FLOAT_REGS))
c8b622ff 21269 return 4 * hard_regno_nregs[32][mode];
34bb030a 21270 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
c8b622ff 21271 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a
DE
21272 else
21273 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
21274}
21275
9c78b944
DE
21276/* Return the decl of a target-specific builtin that implements the
21277 reciprocal of the given function, or NULL_TREE if not available. */
21278
21279static tree
21280rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
21281 bool sqrt ATTRIBUTE_UNUSED)
21282{
21283 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
21284 && flag_finite_math_only && !flag_trapping_math
21285 && flag_unsafe_math_optimizations))
21286 return NULL_TREE;
21287
21288 if (md_fn)
21289 return NULL_TREE;
21290 else
21291 switch (fn)
21292 {
21293 case BUILT_IN_SQRTF:
21294 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
21295
21296 default:
21297 return NULL_TREE;
21298 }
21299}
21300
ef765ea9
DE
21301/* Newton-Raphson approximation of single-precision floating point divide n/d.
21302 Assumes no trapping math and finite arguments. */
21303
21304void
9c78b944 21305rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21306{
21307 rtx x0, e0, e1, y1, u0, v0, one;
21308
21309 x0 = gen_reg_rtx (SFmode);
21310 e0 = gen_reg_rtx (SFmode);
21311 e1 = gen_reg_rtx (SFmode);
21312 y1 = gen_reg_rtx (SFmode);
21313 u0 = gen_reg_rtx (SFmode);
21314 v0 = gen_reg_rtx (SFmode);
21315 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21316
21317 /* x0 = 1./d estimate */
21318 emit_insn (gen_rtx_SET (VOIDmode, x0,
21319 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
21320 UNSPEC_FRES)));
21321 /* e0 = 1. - d * x0 */
21322 emit_insn (gen_rtx_SET (VOIDmode, e0,
21323 gen_rtx_MINUS (SFmode, one,
21324 gen_rtx_MULT (SFmode, d, x0))));
21325 /* e1 = e0 + e0 * e0 */
21326 emit_insn (gen_rtx_SET (VOIDmode, e1,
21327 gen_rtx_PLUS (SFmode,
21328 gen_rtx_MULT (SFmode, e0, e0), e0)));
21329 /* y1 = x0 + e1 * x0 */
21330 emit_insn (gen_rtx_SET (VOIDmode, y1,
21331 gen_rtx_PLUS (SFmode,
21332 gen_rtx_MULT (SFmode, e1, x0), x0)));
21333 /* u0 = n * y1 */
21334 emit_insn (gen_rtx_SET (VOIDmode, u0,
21335 gen_rtx_MULT (SFmode, n, y1)));
21336 /* v0 = n - d * u0 */
21337 emit_insn (gen_rtx_SET (VOIDmode, v0,
21338 gen_rtx_MINUS (SFmode, n,
21339 gen_rtx_MULT (SFmode, d, u0))));
9c78b944
DE
21340 /* dst = u0 + v0 * y1 */
21341 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21342 gen_rtx_PLUS (SFmode,
21343 gen_rtx_MULT (SFmode, v0, y1), u0)));
21344}
21345
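/* Illustrative derivation (not part of the original source): the sequence
   above is a Newton-Raphson refinement of the fres estimate x0 ~= 1/d.
   With e0 = 1 - d*x0, the refined reciprocal is
   y1 = x0*(1 + e0 + e0*e0), computed as x0 + (e0 + e0*e0)*x0, and the
   quotient is then corrected once: u0 = n*y1, v0 = n - d*u0,
   dst = u0 + v0*y1, which suffices for single precision.  */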
21346/* Newton-Raphson approximation of double-precision floating point divide n/d.
21347 Assumes no trapping math and finite arguments. */
21348
21349void
9c78b944 21350rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21351{
21352 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
21353
21354 x0 = gen_reg_rtx (DFmode);
21355 e0 = gen_reg_rtx (DFmode);
21356 e1 = gen_reg_rtx (DFmode);
21357 e2 = gen_reg_rtx (DFmode);
21358 y1 = gen_reg_rtx (DFmode);
21359 y2 = gen_reg_rtx (DFmode);
21360 y3 = gen_reg_rtx (DFmode);
21361 u0 = gen_reg_rtx (DFmode);
21362 v0 = gen_reg_rtx (DFmode);
21363 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
21364
21365 /* x0 = 1./d estimate */
21366 emit_insn (gen_rtx_SET (VOIDmode, x0,
21367 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
21368 UNSPEC_FRES)));
21369 /* e0 = 1. - d * x0 */
21370 emit_insn (gen_rtx_SET (VOIDmode, e0,
21371 gen_rtx_MINUS (DFmode, one,
21372 gen_rtx_MULT (DFmode, d, x0))));
21373 /* y1 = x0 + e0 * x0 */
21374 emit_insn (gen_rtx_SET (VOIDmode, y1,
21375 gen_rtx_PLUS (DFmode,
21376 gen_rtx_MULT (DFmode, e0, x0), x0)));
21377 /* e1 = e0 * e0 */
21378 emit_insn (gen_rtx_SET (VOIDmode, e1,
21379 gen_rtx_MULT (DFmode, e0, e0)));
21380 /* y2 = y1 + e1 * y1 */
21381 emit_insn (gen_rtx_SET (VOIDmode, y2,
21382 gen_rtx_PLUS (DFmode,
21383 gen_rtx_MULT (DFmode, e1, y1), y1)));
21384 /* e2 = e1 * e1 */
21385 emit_insn (gen_rtx_SET (VOIDmode, e2,
21386 gen_rtx_MULT (DFmode, e1, e1)));
21387 /* y3 = y2 + e2 * y2 */
21388 emit_insn (gen_rtx_SET (VOIDmode, y3,
21389 gen_rtx_PLUS (DFmode,
21390 gen_rtx_MULT (DFmode, e2, y2), y2)));
21391 /* u0 = n * y3 */
21392 emit_insn (gen_rtx_SET (VOIDmode, u0,
21393 gen_rtx_MULT (DFmode, n, y3)));
21394 /* v0 = n - d * u0 */
21395 emit_insn (gen_rtx_SET (VOIDmode, v0,
21396 gen_rtx_MINUS (DFmode, n,
21397 gen_rtx_MULT (DFmode, d, u0))));
9c78b944
DE
21398 /* dst = u0 + v0 * y3 */
21399 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21400 gen_rtx_PLUS (DFmode,
21401 gen_rtx_MULT (DFmode, v0, y3), u0)));
21402}
21403
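/* Illustrative note (not part of the original source): the
   double-precision variant above performs additional refinement steps
   (y2, y3) on the reciprocal estimate, since each Newton-Raphson
   iteration roughly doubles the number of correct bits and DFmode needs
   about 53 of them.  */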
565ef4ba 21404
9c78b944
DE
21405/* Newton-Raphson approximation of single-precision floating point rsqrt.
21406 Assumes no trapping math and finite arguments. */
21407
21408void
21409rs6000_emit_swrsqrtsf (rtx dst, rtx src)
21410{
21411 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
21412 half, one, halfthree, c1, cond, label;
21413
21414 x0 = gen_reg_rtx (SFmode);
21415 x1 = gen_reg_rtx (SFmode);
21416 x2 = gen_reg_rtx (SFmode);
21417 y1 = gen_reg_rtx (SFmode);
21418 u0 = gen_reg_rtx (SFmode);
21419 u1 = gen_reg_rtx (SFmode);
21420 u2 = gen_reg_rtx (SFmode);
21421 v0 = gen_reg_rtx (SFmode);
21422 v1 = gen_reg_rtx (SFmode);
21423 v2 = gen_reg_rtx (SFmode);
21424 t0 = gen_reg_rtx (SFmode);
21425 halfthree = gen_reg_rtx (SFmode);
21426 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
21427 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
21428
21429 /* check 0.0, 1.0, NaN, Inf by testing src * src = src */
21430 emit_insn (gen_rtx_SET (VOIDmode, t0,
21431 gen_rtx_MULT (SFmode, src, src)));
21432
21433 emit_insn (gen_rtx_SET (VOIDmode, cond,
21434 gen_rtx_COMPARE (CCFPmode, t0, src)));
21435 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
21436 emit_unlikely_jump (c1, label);
21437
21438 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
21439 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21440
21441 /* halfthree = 1.5 = 1.0 + 0.5 */
21442 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
21443 gen_rtx_PLUS (SFmode, one, half)));
21444
21445 /* x0 = rsqrt estimate */
21446 emit_insn (gen_rtx_SET (VOIDmode, x0,
21447 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
21448 UNSPEC_RSQRT)));
21449
21450 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
21451 emit_insn (gen_rtx_SET (VOIDmode, y1,
21452 gen_rtx_MINUS (SFmode,
21453 gen_rtx_MULT (SFmode, src, halfthree),
21454 src)));
21455
21456 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
21457 emit_insn (gen_rtx_SET (VOIDmode, u0,
21458 gen_rtx_MULT (SFmode, x0, x0)));
21459 emit_insn (gen_rtx_SET (VOIDmode, v0,
21460 gen_rtx_MINUS (SFmode,
21461 halfthree,
21462 gen_rtx_MULT (SFmode, y1, u0))));
21463 emit_insn (gen_rtx_SET (VOIDmode, x1,
21464 gen_rtx_MULT (SFmode, x0, v0)));
21465
21466 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
21467 emit_insn (gen_rtx_SET (VOIDmode, u1,
21468 gen_rtx_MULT (SFmode, x1, x1)));
21469 emit_insn (gen_rtx_SET (VOIDmode, v1,
21470 gen_rtx_MINUS (SFmode,
21471 halfthree,
21472 gen_rtx_MULT (SFmode, y1, u1))));
21473 emit_insn (gen_rtx_SET (VOIDmode, x2,
21474 gen_rtx_MULT (SFmode, x1, v1)));
21475
21476 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
21477 emit_insn (gen_rtx_SET (VOIDmode, u2,
21478 gen_rtx_MULT (SFmode, x2, x2)));
21479 emit_insn (gen_rtx_SET (VOIDmode, v2,
21480 gen_rtx_MINUS (SFmode,
21481 halfthree,
21482 gen_rtx_MULT (SFmode, y1, u2))));
21483 emit_insn (gen_rtx_SET (VOIDmode, dst,
21484 gen_rtx_MULT (SFmode, x2, v2)));
21485
21486 emit_label (XEXP (label, 0));
21487}
21488
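/* Illustrative derivation (not part of the original source): each step of
   the sequence above applies the Newton-Raphson iteration for 1/sqrt(src),
   x_{i+1} = x_i * (1.5 - 0.5*src*x_i*x_i).  y1 holds 0.5*src, computed as
   1.5*src - src so the 1.5 constant can be reused, and the initial
   "src * src == src" test branches around the refinement for the special
   inputs noted above, where iterating on the frsqrte estimate is
   unnecessary or unsafe.  */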
565ef4ba
RS
21489/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
21490 target, and SRC is the argument operand. */
21491
21492void
21493rs6000_emit_popcount (rtx dst, rtx src)
21494{
21495 enum machine_mode mode = GET_MODE (dst);
21496 rtx tmp1, tmp2;
21497
21498 tmp1 = gen_reg_rtx (mode);
21499
21500 if (mode == SImode)
21501 {
21502 emit_insn (gen_popcntbsi2 (tmp1, src));
21503 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
21504 NULL_RTX, 0);
21505 tmp2 = force_reg (SImode, tmp2);
21506 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
21507 }
21508 else
21509 {
21510 emit_insn (gen_popcntbdi2 (tmp1, src));
21511 tmp2 = expand_mult (DImode, tmp1,
21512 GEN_INT ((HOST_WIDE_INT)
21513 0x01010101 << 32 | 0x01010101),
21514 NULL_RTX, 0);
21515 tmp2 = force_reg (DImode, tmp2);
21516 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
21517 }
21518}
21519
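/* Illustrative explanation (not part of the original source): popcntb
   leaves the per-byte population counts in the corresponding bytes of
   TMP1; multiplying by 0x01010101 (or its 64-bit replication) accumulates
   those byte counts into the most significant byte, and the final shift
   by 24 (or 56) extracts that sum as the overall popcount.  */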
21520
21521/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
21522 target, and SRC is the argument operand. */
21523
21524void
21525rs6000_emit_parity (rtx dst, rtx src)
21526{
21527 enum machine_mode mode = GET_MODE (dst);
21528 rtx tmp;
21529
21530 tmp = gen_reg_rtx (mode);
21531 if (mode == SImode)
21532 {
21533 /* Is mult+shift >= shift+xor+shift+xor? */
21534 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
21535 {
21536 rtx tmp1, tmp2, tmp3, tmp4;
21537
21538 tmp1 = gen_reg_rtx (SImode);
21539 emit_insn (gen_popcntbsi2 (tmp1, src));
21540
21541 tmp2 = gen_reg_rtx (SImode);
21542 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
21543 tmp3 = gen_reg_rtx (SImode);
21544 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
21545
21546 tmp4 = gen_reg_rtx (SImode);
21547 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
21548 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
21549 }
21550 else
21551 rs6000_emit_popcount (tmp, src);
21552 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
21553 }
21554 else
21555 {
21556 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
21557 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
21558 {
21559 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
21560
21561 tmp1 = gen_reg_rtx (DImode);
21562 emit_insn (gen_popcntbdi2 (tmp1, src));
21563
21564 tmp2 = gen_reg_rtx (DImode);
21565 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
21566 tmp3 = gen_reg_rtx (DImode);
21567 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
21568
21569 tmp4 = gen_reg_rtx (DImode);
21570 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
21571 tmp5 = gen_reg_rtx (DImode);
21572 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
21573
21574 tmp6 = gen_reg_rtx (DImode);
21575 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
21576 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
21577 }
21578 else
21579 rs6000_emit_popcount (tmp, src);
21580 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
21581 }
21582}
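/* For illustration only, not part of the GCC implementation: when the
   multiply is judged too expensive, the SImode path above folds the
   popcntb result with shifts and xors instead.  In plain C, reusing the
   popcntb model from the popcount sketch:  */
static unsigned int
parity_sketch (unsigned int x)
{
  unsigned int t = popcntb_model_sketch (x);	/* per-byte bit counts */

  t ^= t >> 16;		/* fold the upper halfword into the lower one */
  t ^= t >> 8;		/* fold byte 1 into byte 0 */
  return t & 1;		/* the low bit is the parity of the whole word */
}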
21583
ded9bf77
AH
21584/* Return an RTX describing where to find the return value of a
21585 function returning a complex value of mode MODE. */
21586static rtx
21587rs6000_complex_function_value (enum machine_mode mode)
21588{
21589 unsigned int regno;
21590 rtx r1, r2;
21591 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 21592 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 21593
18f63bfa
AH
21594 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21595 regno = FP_ARG_RETURN;
354ed18f
AH
21596 else
21597 {
18f63bfa 21598 regno = GP_ARG_RETURN;
ded9bf77 21599
18f63bfa
AH
21600 /* 32-bit is OK since it'll go in r3/r4. */
21601 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
21602 return gen_rtx_REG (mode, regno);
21603 }
21604
18f63bfa
AH
21605 if (inner_bytes >= 8)
21606 return gen_rtx_REG (mode, regno);
21607
ded9bf77
AH
21608 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
21609 const0_rtx);
21610 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 21611 GEN_INT (inner_bytes));
ded9bf77
AH
21612 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
21613}
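/* For illustration only: with hardware floating point, a complex float
   (SCmode) return value makes the function above build a PARALLEL that
   puts the real part in f1 (FP_ARG_RETURN) at offset 0 and the imaginary
   part in f2 at offset 4, roughly:

     (parallel:SC [(expr_list (reg:SF f1) (const_int 0))
		   (expr_list (reg:SF f2) (const_int 4))])  */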
21614
a6ebc39a
AH
21615/* Define how to find the value returned by a function.
21616 VALTYPE is the data type of the value (as a tree).
21617 If the precise function being called is known, FUNC is its FUNCTION_DECL;
21618 otherwise, FUNC is 0.
21619
21620 On the SPE, both FPs and vectors are returned in r3.
21621
21622 On RS/6000 an integer value is in r3 and a floating-point value is in
21623 fp1, unless -msoft-float. */
21624
21625rtx
586de218 21626rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
21627{
21628 enum machine_mode mode;
2a8fa26c 21629 unsigned int regno;
a6ebc39a 21630
594a51fe
SS
21631 /* Special handling for structs in darwin64. */
21632 if (rs6000_darwin64_abi
21633 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
21634 && TREE_CODE (valtype) == RECORD_TYPE
21635 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
21636 {
21637 CUMULATIVE_ARGS valcum;
21638 rtx valret;
21639
0b5383eb 21640 valcum.words = 0;
594a51fe
SS
21641 valcum.fregno = FP_ARG_MIN_REG;
21642 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
21643 /* Do a trial code generation as if this were going to be passed as
21644 an argument; if any part goes in memory, we return NULL. */
21645 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
21646 if (valret)
21647 return valret;
21648 /* Otherwise fall through to standard ABI rules. */
21649 }
21650
0e67400a
FJ
21651 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
21652 {
21653 /* A long long return value needs to be split in the -mpowerpc64, 32-bit ABI. */
21654 return gen_rtx_PARALLEL (DImode,
21655 gen_rtvec (2,
21656 gen_rtx_EXPR_LIST (VOIDmode,
21657 gen_rtx_REG (SImode, GP_ARG_RETURN),
21658 const0_rtx),
21659 gen_rtx_EXPR_LIST (VOIDmode,
21660 gen_rtx_REG (SImode,
21661 GP_ARG_RETURN + 1),
21662 GEN_INT (4))));
21663 }
0f086e42
FJ
21664 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
21665 {
21666 return gen_rtx_PARALLEL (DCmode,
21667 gen_rtvec (4,
21668 gen_rtx_EXPR_LIST (VOIDmode,
21669 gen_rtx_REG (SImode, GP_ARG_RETURN),
21670 const0_rtx),
21671 gen_rtx_EXPR_LIST (VOIDmode,
21672 gen_rtx_REG (SImode,
21673 GP_ARG_RETURN + 1),
21674 GEN_INT (4)),
21675 gen_rtx_EXPR_LIST (VOIDmode,
21676 gen_rtx_REG (SImode,
21677 GP_ARG_RETURN + 2),
21678 GEN_INT (8)),
21679 gen_rtx_EXPR_LIST (VOIDmode,
21680 gen_rtx_REG (SImode,
21681 GP_ARG_RETURN + 3),
21682 GEN_INT (12))));
21683 }
602ea4d3 21684
7348aa7f
FXC
21685 mode = TYPE_MODE (valtype);
21686 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 21687 || POINTER_TYPE_P (valtype))
b78d48dd 21688 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 21689
e41b2a33
PB
21690 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21691 /* _Decimal128 must use an even/odd register pair. */
21692 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
00b79d54 21693 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 21694 regno = FP_ARG_RETURN;
ded9bf77 21695 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 21696 && targetm.calls.split_complex_arg)
ded9bf77 21697 return rs6000_complex_function_value (mode);
44688022 21698 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 21699 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 21700 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 21701 regno = ALTIVEC_ARG_RETURN;
18f63bfa 21702 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4d4447b5
PB
21703 && (mode == DFmode || mode == DDmode || mode == DCmode
21704 || mode == TFmode || mode == TDmode || mode == TCmode))
18f63bfa 21705 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
21706 else
21707 regno = GP_ARG_RETURN;
21708
21709 return gen_rtx_REG (mode, regno);
21710}
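/* For illustration only: in 32-bit code compiled with -mpowerpc64, a
   function returning long long takes the DImode case above and yields a
   PARALLEL that splits the value across r3 and r4, roughly:

     (parallel:DI [(expr_list (reg:SI 3) (const_int 0))
		   (expr_list (reg:SI 4) (const_int 4))])  */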
21711
ded9bf77
AH
21712/* Define how to find the value returned by a library function
21713 assuming the value has mode MODE. */
21714rtx
21715rs6000_libcall_value (enum machine_mode mode)
21716{
21717 unsigned int regno;
21718
2e6c9641
FJ
21719 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
21720 {
21721 /* A long long return value needs to be split in the -mpowerpc64, 32-bit ABI. */
21722 return gen_rtx_PARALLEL (DImode,
21723 gen_rtvec (2,
21724 gen_rtx_EXPR_LIST (VOIDmode,
21725 gen_rtx_REG (SImode, GP_ARG_RETURN),
21726 const0_rtx),
21727 gen_rtx_EXPR_LIST (VOIDmode,
21728 gen_rtx_REG (SImode,
21729 GP_ARG_RETURN + 1),
21730 GEN_INT (4))));
21731 }
21732
e41b2a33
PB
21733 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21734 /* _Decimal128 must use an even/odd register pair. */
21735 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
00b79d54 21736 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
21737 && TARGET_HARD_FLOAT && TARGET_FPRS)
21738 regno = FP_ARG_RETURN;
44688022
AM
21739 else if (ALTIVEC_VECTOR_MODE (mode)
21740 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 21741 regno = ALTIVEC_ARG_RETURN;
42ba5130 21742 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 21743 return rs6000_complex_function_value (mode);
18f63bfa 21744 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4d4447b5
PB
21745 && (mode == DFmode || mode == DDmode || mode == DCmode
21746 || mode == TFmode || mode == TDmode || mode == TCmode))
18f63bfa 21747 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
21748 else
21749 regno = GP_ARG_RETURN;
21750
21751 return gen_rtx_REG (mode, regno);
21752}
21753
d1d0c603
JJ
21754/* Return the initial offset between register FROM, which is to be
21755 eliminated, and its replacement register TO, at the start of a routine. */
21756HOST_WIDE_INT
21757rs6000_initial_elimination_offset (int from, int to)
21758{
21759 rs6000_stack_t *info = rs6000_stack_info ();
21760 HOST_WIDE_INT offset;
21761
7d5175e1 21762 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 21763 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
21764 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
21765 {
21766 offset = info->push_p ? 0 : -info->total_size;
21767 if (FRAME_GROWS_DOWNWARD)
5b667039 21768 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
21769 }
21770 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
21771 offset = FRAME_GROWS_DOWNWARD
5b667039 21772 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
21773 : 0;
21774 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
21775 offset = info->total_size;
21776 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
21777 offset = info->push_p ? info->total_size : 0;
21778 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
21779 offset = 0;
21780 else
37409796 21781 gcc_unreachable ();
d1d0c603
JJ
21782
21783 return offset;
21784}
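/* For illustration only, with made-up numbers: if rs6000_stack_info
   reports total_size == 144 and push_p is set (the prologue allocates a
   frame), then eliminating ARG_POINTER_REGNUM to HARD_FRAME_POINTER_REGNUM
   or to STACK_POINTER_REGNUM both give 144, while eliminating
   HARD_FRAME_POINTER_REGNUM to STACK_POINTER_REGNUM gives 0.  */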
21785
58646b77 21786/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 21787
c8e4f0e9 21788static bool
3101faab 21789rs6000_is_opaque_type (const_tree type)
62e1dfcf 21790{
58646b77 21791 return (type == opaque_V2SI_type_node
2abe3e28 21792 || type == opaque_V2SF_type_node
58646b77
PB
21793 || type == opaque_p_V2SI_type_node
21794 || type == opaque_V4SI_type_node);
62e1dfcf
NC
21795}
21796
96714395 21797static rtx
a2369ed3 21798rs6000_dwarf_register_span (rtx reg)
96714395
AH
21799{
21800 unsigned regno;
21801
4d4cbc0e
AH
21802 if (TARGET_SPE
21803 && (SPE_VECTOR_MODE (GET_MODE (reg))
4d4447b5
PB
21804 || (TARGET_E500_DOUBLE
21805 && (GET_MODE (reg) == DFmode || GET_MODE (reg) == DDmode))))
4d4cbc0e
AH
21806 ;
21807 else
96714395
AH
21808 return NULL_RTX;
21809
21810 regno = REGNO (reg);
21811
21812 /* The duality of the SPE register size wreaks all kinds of havoc.
21813 This is a way of distinguishing r0 in 32-bits from r0 in
21814 64-bits. */
21815 return
21816 gen_rtx_PARALLEL (VOIDmode,
3bd104d1
AH
21817 BYTES_BIG_ENDIAN
21818 ? gen_rtvec (2,
21819 gen_rtx_REG (SImode, regno + 1200),
21820 gen_rtx_REG (SImode, regno))
21821 : gen_rtvec (2,
21822 gen_rtx_REG (SImode, regno),
21823 gen_rtx_REG (SImode, regno + 1200)));
96714395
AH
21824}
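/* For illustration only: for an SPE 64-bit value living in r5 on a
   big-endian target, the span built above is roughly
   (parallel [(reg:SI 1205) (reg:SI 5)]), i.e. the pseudo high-part
   register 1205 followed by the ordinary r5.  */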
21825
37ea0b7e
JM
21826/* Fill in the sizes of the SPE register high parts in the table used by the unwinder. */
21827
21828static void
21829rs6000_init_dwarf_reg_sizes_extra (tree address)
21830{
21831 if (TARGET_SPE)
21832 {
21833 int i;
21834 enum machine_mode mode = TYPE_MODE (char_type_node);
21835 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
21836 rtx mem = gen_rtx_MEM (BLKmode, addr);
21837 rtx value = gen_int_mode (4, mode);
21838
21839 for (i = 1201; i < 1232; i++)
21840 {
21841 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
21842 HOST_WIDE_INT offset
21843 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
21844
21845 emit_move_insn (adjust_address (mem, mode, offset), value);
21846 }
21847 }
21848}
21849
93c9d1ba
AM
21850/* Map internal gcc register numbers to DWARF2 register numbers. */
21851
21852unsigned int
21853rs6000_dbx_register_number (unsigned int regno)
21854{
21855 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
21856 return regno;
21857 if (regno == MQ_REGNO)
21858 return 100;
1de43f85 21859 if (regno == LR_REGNO)
93c9d1ba 21860 return 108;
1de43f85 21861 if (regno == CTR_REGNO)
93c9d1ba
AM
21862 return 109;
21863 if (CR_REGNO_P (regno))
21864 return regno - CR0_REGNO + 86;
21865 if (regno == XER_REGNO)
21866 return 101;
21867 if (ALTIVEC_REGNO_P (regno))
21868 return regno - FIRST_ALTIVEC_REGNO + 1124;
21869 if (regno == VRSAVE_REGNO)
21870 return 356;
21871 if (regno == VSCR_REGNO)
21872 return 67;
21873 if (regno == SPE_ACC_REGNO)
21874 return 99;
21875 if (regno == SPEFSCR_REGNO)
21876 return 612;
21877 /* SPE high reg number. We get these values of regno from
21878 rs6000_dwarf_register_span. */
37409796
NS
21879 gcc_assert (regno >= 1200 && regno < 1232);
21880 return regno;
93c9d1ba
AM
21881}
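/* For illustration only, a few mappings implied by the function above:
   LR -> 108, CTR -> 109, CR2 -> 88 (CR0 is 86), the first AltiVec
   register -> 1124, and VRSAVE -> 356.  */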
21882
93f90be6 21883/* target hook eh_return_filter_mode */
f676971a 21884static enum machine_mode
93f90be6
FJ
21885rs6000_eh_return_filter_mode (void)
21886{
21887 return TARGET_32BIT ? SImode : word_mode;
21888}
21889
00b79d54
BE
21890/* Target hook for scalar_mode_supported_p. */
21891static bool
21892rs6000_scalar_mode_supported_p (enum machine_mode mode)
21893{
21894 if (DECIMAL_FLOAT_MODE_P (mode))
21895 return true;
21896 else
21897 return default_scalar_mode_supported_p (mode);
21898}
21899
f676971a
EC
21900/* Target hook for vector_mode_supported_p. */
21901static bool
21902rs6000_vector_mode_supported_p (enum machine_mode mode)
21903{
21904
96038623
DE
21905 if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
21906 return true;
21907
f676971a
EC
21908 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
21909 return true;
21910
21911 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
21912 return true;
21913
21914 else
21915 return false;
21916}
21917
bb8df8a6
EC
21918/* Target hook for invalid_arg_for_unprototyped_fn. */
21919static const char *
3101faab 21920invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
4d3e6fae
FJ
21921{
21922 return (!rs6000_darwin64_abi
21923 && typelist == 0
21924 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
21925 && (funcdecl == NULL_TREE
21926 || (TREE_CODE (funcdecl) == FUNCTION_DECL
21927 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
21928 ? N_("AltiVec argument passed to unprototyped function")
21929 : NULL;
21930}
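/* For illustration only, not part of the GCC sources: the diagnostic
   above is triggered by code along these lines, where foo has no
   prototype in scope and receives an AltiVec vector when compiled with
   -maltivec on a non-darwin64 target:

     extern void foo ();
     void bar (__vector signed int v) { foo (v); }
*/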
21931
3aebbe5f
JJ
21932/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
21933 setup by using __stack_chk_fail_local hidden function instead of
21934 calling __stack_chk_fail directly. Otherwise it is better to call
21935 __stack_chk_fail directly. */
21936
21937static tree
21938rs6000_stack_protect_fail (void)
21939{
21940 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
21941 ? default_hidden_stack_protect_fail ()
21942 : default_external_stack_protect_fail ();
21943}
21944
17211ab5 21945#include "gt-rs6000.h"