/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "tree-gimple.h"
#include "tree-flow.h"
#include "intl.h"
#include "params.h"
#include "tm-constrs.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif

#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))

/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;	/* first callee saved GP register used */
  int first_fp_reg_save;	/* first callee saved FP register used */
  int first_altivec_reg_save;	/* first callee saved AltiVec register used */
  int lr_save_p;		/* true if the link reg needs to be saved */
  int cr_save_p;		/* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;	/* mask of vec registers to save */
  int push_p;			/* true if we need to allocate stack space */
  int calls_p;			/* true if the function makes any calls */
  int world_save_p;		/* true if we're saving *everything*:
				   r13-r31, cr, f14-f31, vrsave, v20-v31 */
  enum rs6000_abi abi;		/* which ABI to use */
  int gp_save_offset;		/* offset to save GP regs from initial SP */
  int fp_save_offset;		/* offset to save FP regs from initial SP */
  int altivec_save_offset;	/* offset to save AltiVec regs from initial SP */
  int lr_save_offset;		/* offset to save LR from initial SP */
  int cr_save_offset;		/* offset to save CR from initial SP */
  int vrsave_save_offset;	/* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;	/* offset to save spe 64-bit gprs */
  int varargs_save_offset;	/* offset to save the varargs registers */
  int ehrd_offset;		/* offset to EH return data */
  int reg_size;			/* register size (4 or 8) */
  HOST_WIDE_INT vars_size;	/* variable save area size */
  int parm_size;		/* outgoing parameter size */
  int save_size;		/* save area size */
  int fixed_size;		/* fixed size of stack frame */
  int gp_size;			/* size of saved GP registers */
  int fp_size;			/* size of saved FP registers */
  int altivec_size;		/* size of saved AltiVec registers */
  int cr_size;			/* size to hold CR if not in save_size */
  int vrsave_size;		/* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;	/* size of altivec alignment padding if
				   not in save_size */
  int spe_gp_size;		/* size of 64-bit GPR save size for SPE */
  int spe_padding_size;
  HOST_WIDE_INT total_size;	/* total bytes allocated for stack */
  int spe_64bit_regs_used;
} rs6000_stack_t;

/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
  /* Temporary stack slot to use for SDmode copies.  This slot is
     64-bits wide and is allocated early enough so that the offset
     does not overflow the 16-bit load/store offset field.  */
  rtx sdmode_stack_slot;
} machine_function;

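/* Usage note (illustrative, following the usual GCC back-end pattern):
   one of these records is allocated lazily per function and reached
   through cfun->machine; rs6000_init_machine_status, declared further
   below, is expected to be the allocator installed via
   init_machine_status.  The GTY(()) marker keeps the record and the
   rtx/string fields it points to visible to the garbage collector.  */
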
/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};

static GTY(()) bool rs6000_cell_dont_microcode;

/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Align branch targets.  */
static GTY(()) bool rs6000_align_branch_targets;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double.  */
int rs6000_long_double_type_size;

/* IEEE quad extended precision long double.  */
int rs6000_ieeequad;

/* Nonzero to use AltiVec ABI.  */
int rs6000_altivec_abi;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
int rs6000_darwin64_abi;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */

/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

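/* Sketch of how this table is consumed (an assumption based on the
   usual rs6000 configuration, not verified here): rs6000.h's
   HARD_REGNO_MODE_OK is expected to expand to something like
     rs6000_hard_regno_mode_ok_p[(int) (MODE)][REGNO]
   so the real work happens once, in rs6000_init_hard_regno_mode_ok
   further below, rather than on every query.  */
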
/* Built in types.  */

tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];

const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Cached value of rs6000_variable_issue. This is cached in
   rs6000_variable_issue hook and returned from rs6000_sched_reorder2.  */
static short cached_can_issue_more;

static GTY(()) section *read_only_data_section;
static GTY(()) section *private_data_section;
static GTY(()) section *read_only_private_data_section;
static GTY(()) section *sdata2_section;
static GTY(()) section *toc_section;

/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
struct {
  bool aix_struct_ret;		/* True if -maix-struct-ret was used.  */
  bool alignment;		/* True if -malign- was used.  */
  bool spe_abi;			/* True if -mabi=spe/no-spe was used.  */
  bool altivec_abi;		/* True if -mabi=altivec/no-altivec used.  */
  bool spe;			/* True if -mspe= was used.  */
  bool float_gprs;		/* True if -mfloat-gprs= was used.  */
  bool isel;			/* True if -misel was used.  */
  bool long_double;		/* True if -mlong-double- was used.  */
  bool ieee;			/* True if -mabi=ieee/ibmlongdouble used.  */
  bool vrsave;			/* True if -mvrsave was used.  */
} rs6000_explicit_options;

struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
\f
/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;	  /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;	  /* cost of DImode multiplication.  */
  const int divsi;	  /* cost of SImode division.  */
  const int divdi;	  /* cost of DImode division.  */
  const int fp;		  /* cost of simple SFmode and DFmode insns.  */
  const int dmul;	  /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;	  /* cost of SFmode division (fdivs).  */
  const int ddiv;	  /* cost of DFmode division (fdiv).  */
  const int cache_line_size;	/* cache line size in bytes.  */
  const int l1_cache_size;	/* size of l1 cache, in kilobytes.  */
  const int l2_cache_size;	/* size of l2 cache, in kilobytes.  */
  const int simultaneous_prefetches; /* number of parallel prefetch
					operations.  */
};

const struct processor_costs *rs6000_cost;

/* Processor costs (relative to an add) */

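/* Reading the tables below (illustrative note): each latency entry is
   wrapped in COSTS_N_INSNS, GCC's convention for "the cost of this many
   simple instructions", so e.g. COSTS_N_INSNS (19) for divsi models
   SImode division as roughly nineteen times the cost of an add, in
   keeping with the "relative to an add" convention stated above.  The
   trailing cache-line, cache-size and prefetch-stream fields are plain
   numbers, not COSTS_N_INSNS values.  */
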
/* Instruction size costs on 32bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  32,			/* cache line size */
  0,			/* l1 cache */
  0,			/* l2 cache */
  0,			/* streams */
};

/* Instruction size costs on 64bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  128,			/* cache line size */
  0,			/* l1 cache */
  0,			/* l2 cache */
  0,			/* streams */
};

/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
  128,			/* cache line size */
  64,			/* l1 cache */
  512,			/* l2 cache */
  0,			/* streams */
};

/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  256,			/* cache line size */
  256,			/* l1 cache */
  1024,			/* l2 cache */
  0,			/* streams */
};

/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  128,			/* cache line size */
  128,			/* l1 cache */
  2048,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  32,			/* cache line size */
  4,			/* l1 cache */
  16,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,			/* cache line size */
  4,			/* l1 cache */
  16,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,			/* cache line size */
  16,			/* l1 cache */
  128,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,			/* cache line size */
  32,			/* l1 cache */
  256,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,			/* cache line size */
  32,			/* l1 cache */
  256,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,			/* cache line size */
  8,			/* l1 cache */
  64,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,			/* cache line size */
  16,			/* l1 cache */
  512,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,			/* cache line size */
  32,			/* l1 cache */
  1024,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  128,			/* cache line size */
  32,			/* l1 cache */
  1024,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
  128,			/* cache line size */
  64,			/* l1 cache */
  1024,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on Cell processor.  */
/* COSTS_N_INSNS (1) ~ one add.  */
static const
struct processor_costs ppccell_cost = {
  COSTS_N_INSNS (9/2)+2,    /* mulsi */
  COSTS_N_INSNS (6/2),    /* mulsi_const */
  COSTS_N_INSNS (6/2),    /* mulsi_const9 */
  COSTS_N_INSNS (15/2)+2,   /* muldi */
  COSTS_N_INSNS (38/2),   /* divsi */
  COSTS_N_INSNS (70/2),   /* divdi */
  COSTS_N_INSNS (10/2),   /* fp */
  COSTS_N_INSNS (10/2),   /* dmul */
  COSTS_N_INSNS (74/2),   /* sdiv */
  COSTS_N_INSNS (74/2),   /* ddiv */
  128,			/* cache line size */
  32,			/* l1 cache */
  512,			/* l2 cache */
  6,			/* streams */
};

/* Instruction costs on PPC750 and PPC7400 processors.  */
static const
struct processor_costs ppc750_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (17),   /* divsi */
  COSTS_N_INSNS (17),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,			/* cache line size */
  32,			/* l1 cache */
  512,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC7450 processors.  */
static const
struct processor_costs ppc7450_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (23),   /* divsi */
  COSTS_N_INSNS (23),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (21),   /* sdiv */
  COSTS_N_INSNS (35),   /* ddiv */
  32,			/* cache line size */
  32,			/* l1 cache */
  1024,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC8540 processors.  */
static const
struct processor_costs ppc8540_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (29),   /* sdiv */
  COSTS_N_INSNS (29),   /* ddiv */
  32,			/* cache line size */
  32,			/* l1 cache */
  256,			/* l2 cache */
  1,			/* prefetch streams */
};

/* Instruction costs on E300C2 and E300C3 cores.  */
static const
struct processor_costs ppce300c2c3_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,			/* cache line size */
  16,			/* l1 cache */
  16,			/* l2 cache */
  1,			/* prefetch streams */
};

/* Instruction costs on POWER4 and POWER5 processors.  */
static const
struct processor_costs power4_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (18),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  128,			/* cache line size */
  32,			/* l1 cache */
  1024,			/* l2 cache */
  8,			/* prefetch streams */
};

/* Instruction costs on POWER6 processors.  */
static const
struct processor_costs power6_cost = {
  COSTS_N_INSNS (8),    /* mulsi */
  COSTS_N_INSNS (8),    /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (8),    /* muldi */
  COSTS_N_INSNS (22),   /* divsi */
  COSTS_N_INSNS (28),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (13),   /* sdiv */
  COSTS_N_INSNS (16),   /* ddiv */
  128,			/* cache line size */
  64,			/* l1 cache */
  2048,			/* l2 cache */
  16,			/* prefetch streams */
};

\f
static bool rs6000_function_ok_for_sibcall (tree, tree);
static const char *rs6000_invalid_within_doloop (const_rtx);
static rtx rs6000_generate_compare (enum rtx_code);
static void rs6000_emit_stack_tie (void);
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
static bool spe_func_has_64bit_regs_p (void);
static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
			     int, HOST_WIDE_INT);
static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
static unsigned rs6000_hash_constant (rtx);
static unsigned toc_hash_function (const void *);
static int toc_hash_eq (const void *, const void *);
static int constant_pool_expr_1 (rtx, int *, int *);
static bool constant_pool_expr_p (rtx);
static bool legitimate_small_data_p (enum machine_mode, rtx);
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
static struct machine_function * rs6000_init_machine_status (void);
static bool rs6000_assemble_integer (rtx, unsigned int, int);
static bool no_global_regs_above (int);
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
static int rs6000_ra_ever_killed (void);
static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
static bool rs6000_ms_bitfield_layout_p (const_tree);
static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
static const char *rs6000_mangle_type (const_tree);
extern const struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes (tree);
static bool rs6000_reg_live_or_pic_offset_p (int);
static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
				    tree);
static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
static bool rs6000_return_in_memory (const_tree, const_tree);
static void rs6000_file_start (void);
#if TARGET_ELF
static int rs6000_elf_reloc_rw_mask (void);
static void rs6000_elf_asm_out_constructor (rtx, int);
static void rs6000_elf_asm_out_destructor (rtx, int);
static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
static void rs6000_elf_asm_init_sections (void);
static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
					       unsigned HOST_WIDE_INT);
static void rs6000_elf_encode_section_info (tree, rtx, int)
     ATTRIBUTE_UNUSED;
#endif
static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
static void rs6000_alloc_sdmode_stack_slot (void);
static void rs6000_instantiate_decls (void);
#if TARGET_XCOFF
static void rs6000_xcoff_asm_output_anchor (rtx);
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
static void rs6000_xcoff_asm_init_sections (void);
static int rs6000_xcoff_reloc_rw_mask (void);
static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
static section *rs6000_xcoff_select_section (tree, int,
					     unsigned HOST_WIDE_INT);
static void rs6000_xcoff_unique_section (tree, int);
static section *rs6000_xcoff_select_rtx_section
  (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
static const char * rs6000_xcoff_strip_name_encoding (const char *);
static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
static void rs6000_xcoff_file_start (void);
static void rs6000_xcoff_file_end (void);
#endif
static int rs6000_variable_issue (FILE *, int, rtx, int);
static bool rs6000_rtx_costs (rtx, int, int, int *);
static int rs6000_adjust_cost (rtx, rtx, rtx, int);
static void rs6000_sched_init (FILE *, int, int);
static bool is_microcoded_insn (rtx);
static bool is_nonpipeline_insn (rtx);
static bool is_cracked_insn (rtx);
static bool is_branch_slot_insn (rtx);
static bool is_load_insn (rtx);
static rtx get_store_dest (rtx pat);
static bool is_store_insn (rtx);
static bool set_to_load_agen (rtx,rtx);
static bool adjacent_mem_locations (rtx,rtx);
static int rs6000_adjust_priority (rtx, int);
static int rs6000_issue_rate (void);
static bool rs6000_is_costly_dependence (dep_t, int, int);
static rtx get_next_active_insn (rtx, rtx);
static bool insn_terminates_group_p (rtx , enum group_termination);
static bool insn_must_be_first_in_group (rtx);
static bool insn_must_be_last_in_group (rtx);
static bool is_costly_group (rtx *, rtx);
static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
static int redefine_groups (FILE *, int, rtx, rtx);
static int pad_groups (FILE *, int, rtx, rtx);
static void rs6000_sched_finish (FILE *, int);
static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
static int rs6000_use_sched_lookahead (void);
static int rs6000_use_sched_lookahead_guard (rtx);
static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
static tree rs6000_builtin_mask_for_load (void);
static tree rs6000_builtin_mul_widen_even (tree);
static tree rs6000_builtin_mul_widen_odd (tree);
static tree rs6000_builtin_conversion (enum tree_code, tree);

static void def_builtin (int, const char *, tree, int);
static bool rs6000_vector_alignment_reachable (const_tree, bool);
static void rs6000_init_builtins (void);
static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void altivec_init_builtins (void);
static void rs6000_common_init_builtins (void);
static void rs6000_init_libfuncs (void);

static void paired_init_builtins (void);
static rtx paired_expand_builtin (tree, rtx, bool *);
static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx paired_expand_stv_builtin (enum insn_code, tree);
static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);

static void enable_mask_for_builtins (struct builtin_description *, int,
				      enum rs6000_builtins,
				      enum rs6000_builtins);
static tree build_opaque_vector_type (tree, int);
static void spe_init_builtins (void);
static rtx spe_expand_builtin (tree, rtx, bool *);
static rtx spe_expand_stv_builtin (enum insn_code, tree);
static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
static rs6000_stack_t *rs6000_stack_info (void);
static void debug_stack_info (rs6000_stack_t *);

static rtx altivec_expand_builtin (tree, rtx, bool *);
static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
static rtx altivec_expand_st_builtin (tree, rtx, bool *);
static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_predicate_builtin (enum insn_code,
					     const char *, tree, rtx);
static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_stv_builtin (enum insn_code, tree);
static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
static rtx altivec_expand_vec_set_builtin (tree);
static rtx altivec_expand_vec_ext_builtin (tree, rtx);
static int get_element_number (tree, tree);
static bool rs6000_handle_option (size_t, const char *, int);
static void rs6000_parse_tls_size_option (void);
static void rs6000_parse_yes_no_option (const char *, const char *, int *);
static int first_altivec_reg_to_save (void);
static unsigned int compute_vrsave_mask (void);
static void compute_save_world_info (rs6000_stack_t *info_ptr);
static void is_altivec_return_reg (rtx, void *);
static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
int easy_vector_constant (rtx, enum machine_mode);
static bool rs6000_is_opaque_type (const_tree);
static rtx rs6000_dwarf_register_span (rtx);
static void rs6000_init_dwarf_reg_sizes_extra (tree);
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static rtx rs6000_tls_get_addr (void);
static rtx rs6000_got_sym (void);
static int rs6000_tls_symbol_ref_1 (rtx *, void *);
static const char *rs6000_get_some_local_dynamic_name (void);
static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
static rtx rs6000_complex_function_value (enum machine_mode);
static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
				    enum machine_mode, tree);
static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
						      HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
							tree, HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
					      HOST_WIDE_INT,
					      rtx[], int *);
static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
						const_tree, HOST_WIDE_INT,
						rtx[], int *);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
static void setup_incoming_varargs (CUMULATIVE_ARGS *,
				    enum machine_mode, tree,
				    int *, int);
static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
				      const_tree, bool);
static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
				     tree, bool);
static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
#if TARGET_MACHO
static void macho_branch_islands (void);
static int no_previous_def (tree function_name);
static tree get_prev_label (tree function_name);
static void rs6000_darwin_file_start (void);
#endif

static tree rs6000_build_builtin_va_list (void);
static void rs6000_va_start (tree, rtx);
static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
static bool rs6000_scalar_mode_supported_p (enum machine_mode);
static bool rs6000_vector_mode_supported_p (enum machine_mode);
static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
			     enum machine_mode);
static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
				       enum machine_mode);
static int get_vsel_insn (enum machine_mode);
static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
static tree rs6000_stack_protect_fail (void);

const int INSN_NOT_AVAILABLE = -1;
static enum machine_mode rs6000_eh_return_filter_mode (void);

/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
\f
/* Default register names.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr",
      /* Soft frame pointer.  */
      "sfp"
};

#ifdef TARGET_REGNAMES
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};
#endif
\f
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
\f
/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT rs6000_sched_init
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER rs6000_sched_reorder
#undef TARGET_SCHED_REORDER2
#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard

#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion

#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE rs6000_mangle_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
#endif

#undef TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

#undef TARGET_VECTOR_OPAQUE_P
#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra

/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START rs6000_va_start

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION rs6000_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT)

#undef TARGET_STACK_PROTECT_FAIL
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail

/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
   The PowerPC architecture requires only weak consistency among
   processors--that is, memory accesses between processors need not be
   sequentially consistent and memory accesses among processors can occur
   in any order.  The ability to order memory accesses weakly provides
   opportunities for more efficient use of the system bus.  Unless a
   dependency exists, the 604e allows read operations to precede store
   operations.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif

/* Use a 32-bit anchor range.  This leads to sequences like:

	addis	tmp,anchor,high
	add	dest,tmp,low

   where tmp itself acts as an anchor, and can be shared between
   accesses to the same 64k page.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p

#undef TARGET_BUILTIN_RECIPROCAL
#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal

#undef TARGET_EXPAND_TO_RTL_HOOK
#define TARGET_EXPAND_TO_RTL_HOOK rs6000_alloc_sdmode_stack_slot

#undef TARGET_INSTANTIATE_DECLS
#define TARGET_INSTANTIATE_DECLS rs6000_instantiate_decls

struct gcc_target targetm = TARGET_INITIALIZER;
\f

/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.
     This excludes the 32-bit decimal float mode for now.  */
  if (FP_REGNO_P (regno))
    return
      ((SCALAR_FLOAT_MODE_P (mode)
	&& (mode != TDmode || (regno % 2) == 0)
	&& FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
       || (GET_MODE_CLASS (mode) == MODE_INT
	   && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
       || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
	   && PAIRED_VECTOR_MODE (mode)));

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general register and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}

/* Initialize rs6000_hard_regno_mode_ok_p table.  */
static void
rs6000_init_hard_regno_mode_ok (void)
{
  int r, m;

  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
    for (m = 0; m < NUM_MACHINE_MODES; ++m)
      if (rs6000_hard_regno_mode_ok (r, m))
	rs6000_hard_regno_mode_ok_p[m][r] = true;
}

#if TARGET_MACHO
/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS.  */

static void
darwin_rs6000_override_options (void)
{
  /* The Darwin ABI always includes AltiVec, can't be (validly) turned
     off.  */
  rs6000_altivec_abi = 1;
  TARGET_ALTIVEC_VRSAVE = 1;
  if (DEFAULT_ABI == ABI_DARWIN)
    {
      if (MACHO_DYNAMIC_NO_PIC_P)
	{
	  if (flag_pic)
	    warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
	  flag_pic = 0;
	}
      else if (flag_pic == 1)
	{
	  flag_pic = 2;
	}
    }
  if (TARGET_64BIT && ! TARGET_POWERPC64)
    {
      target_flags |= MASK_POWERPC64;
      warning (0, "-m64 requires PowerPC64 architecture, enabling");
    }
  if (flag_mkernel)
    {
      rs6000_default_long_calls = 1;
      target_flags |= MASK_SOFT_FLOAT;
    }

  /* Make -m64 imply -maltivec.  Darwin's 64-bit ABI includes
     Altivec.  */
  if (!flag_mkernel && !flag_apple_kext
      && TARGET_64BIT
      && ! (target_flags_explicit & MASK_ALTIVEC))
    target_flags |= MASK_ALTIVEC;

  /* Unless the user (not the configurer) has explicitly overridden
     it with -mcpu=G3 or -mno-altivec, then 10.5+ targets default to
     G4 unless targeting the kernel.  */
  if (!flag_mkernel
      && !flag_apple_kext
      && strverscmp (darwin_macosx_version_min, "10.5") >= 0
      && ! (target_flags_explicit & MASK_ALTIVEC)
      && ! rs6000_select[1].string)
    {
      target_flags |= MASK_ALTIVEC;
    }
}
#endif

/* If not otherwise specified by a target, make 'long double' equivalent to
   'double'.  */

#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
#endif

/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
rs6000_override_options (const char *default_cpu)
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;
  int set_masks;

  /* Simplifications for entries below.  */

  enum {
    POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
    POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
  };

  /* This table occasionally claims that a processor does not support
     a particular feature even though it does, but the feature is slower
     than the alternative.  Thus, it shouldn't be relied on as a
     complete description of the processor's support.

     Please keep this list in order, and don't forget to update the
     documentation in invoke.texi when adding a new processor or
     flag.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;	/* Target flags to enable.  */
    } const processor_target_table[]
      = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"403", PROCESSOR_PPC403,
	  POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
	 {"405", PROCESSOR_PPC405,
	  POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
	 {"405fp", PROCESSOR_PPC405,
	  POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
	 {"440", PROCESSOR_PPC440,
	  POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
	 {"440fp", PROCESSOR_PPC440,
	  POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
	 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
	 {"601", PROCESSOR_PPC601,
	  MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
	 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"620", PROCESSOR_PPC620,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
	 {"630", PROCESSOR_PPC630,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
	 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
	 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
	 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
	 /* 8548 has a dummy entry for now.  */
	 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
	 {"e300c2", PROCESSOR_PPCE300C2, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"e300c3", PROCESSOR_PPCE300C3, POWERPC_BASE_MASK},
	 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"970", PROCESSOR_POWER4,
	  POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
	 {"cell", PROCESSOR_CELL,
	  POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
	 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
	 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1450 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1451 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1452 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1453 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1454 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1455 {"power2", PROCESSOR_POWER,
1456 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1457 {"power3", PROCESSOR_PPC630,
1458 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1459 {"power4", PROCESSOR_POWER4,
fc091c8e 1460 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
ec507f2d 1461 {"power5", PROCESSOR_POWER5,
432218ba
DE
1462 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1463 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7
DE
1464 {"power5+", PROCESSOR_POWER5,
1465 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1466 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1467 {"power6", PROCESSOR_POWER6,
e118597e 1468 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1469 | MASK_FPRND | MASK_CMPB | MASK_DFP },
44cd321e
PS
1470 {"power6x", PROCESSOR_POWER6,
1471 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1472 | MASK_FPRND | MASK_CMPB | MASK_MFPGPR | MASK_DFP },
66188a7e
GK
1473 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1474 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1475 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1476 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1477 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1478 {"rios2", PROCESSOR_RIOS2,
1479 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1480 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1481 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1482 {"rs64", PROCESSOR_RS64A,
1483 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1484 };
5248c961 1485
ca7558fc 1486 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1487
66188a7e
GK
1488 /* Some OSs don't support saving the high part of 64-bit registers on
1489 context switch. Other OSs don't support saving Altivec registers.
1490 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1491 settings; if the user wants either, the user must explicitly specify
1492 them and we won't interfere with the user's specification. */
1493
1494 enum {
1495 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1496 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1497 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1498 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1499 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1500 };
0d1fbc8c
AH
1501
1502 rs6000_init_hard_regno_mode_ok ();
1503
c4ad648e 1504 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1505#ifdef OS_MISSING_POWERPC64
1506 if (OS_MISSING_POWERPC64)
1507 set_masks &= ~MASK_POWERPC64;
1508#endif
1509#ifdef OS_MISSING_ALTIVEC
1510 if (OS_MISSING_ALTIVEC)
1511 set_masks &= ~MASK_ALTIVEC;
1512#endif
1513
768875a8
AM
1514 /* Don't override by the processor default if given explicitly. */
1515 set_masks &= ~target_flags_explicit;
957211c3 1516
a4f6c312 1517 /* Identify the processor type. */
8e3f41e7 1518 rs6000_select[0].string = default_cpu;
3cb999d8 1519 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1520
b6a1cbae 1521 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1522 {
8e3f41e7
MM
1523 ptr = &rs6000_select[i];
1524 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1525 {
8e3f41e7
MM
1526 for (j = 0; j < ptt_size; j++)
1527 if (! strcmp (ptr->string, processor_target_table[j].name))
1528 {
1529 if (ptr->set_tune_p)
1530 rs6000_cpu = processor_target_table[j].processor;
1531
1532 if (ptr->set_arch_p)
1533 {
66188a7e
GK
1534 target_flags &= ~set_masks;
1535 target_flags |= (processor_target_table[j].target_enable
1536 & set_masks);
8e3f41e7
MM
1537 }
1538 break;
1539 }
1540
4406229e 1541 if (j == ptt_size)
8e3f41e7 1542 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1543 }
1544 }
8a61d227 1545
993f19a8 1546 if (TARGET_E500)
a3170dc6
AH
1547 rs6000_isel = 1;
1548
fa41c305
EW
1549 if (rs6000_cpu == PROCESSOR_PPCE300C2 || rs6000_cpu == PROCESSOR_PPCE300C3)
1550 {
1551 if (TARGET_ALTIVEC)
1552 error ("AltiVec not supported in this target");
1553 if (TARGET_SPE)
1554 error ("SPE not supported in this target");
1555 }
1556
dff9f1b6
DE
1557 /* If we are optimizing big endian systems for space, use the load/store
1558 multiple and string instructions. */
ef792183 1559 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1560 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1561
a4f6c312
SS
1562 /* Don't allow -mmultiple or -mstring on little endian systems
1563 unless the cpu is a 750, because the hardware doesn't support the
1564 instructions used in little endian mode, and causes an alignment
1565 trap. The 750 does not cause an alignment trap (except when the
1566 target is unaligned). */
bef84347 1567
b21fb038 1568 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1569 {
1570 if (TARGET_MULTIPLE)
1571 {
1572 target_flags &= ~MASK_MULTIPLE;
b21fb038 1573 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1574 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1575 }
1576
1577 if (TARGET_STRING)
1578 {
1579 target_flags &= ~MASK_STRING;
b21fb038 1580 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1581 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1582 }
1583 }
3933e0e1 1584
38c1f2d7
MM
1585 /* Set debug flags */
1586 if (rs6000_debug_name)
1587 {
bfc79d3b 1588 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1589 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1590 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1591 rs6000_debug_stack = 1;
bfc79d3b 1592 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1593 rs6000_debug_arg = 1;
1594 else
c725bd79 1595 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1596 }
1597
57ac7be9
AM
1598 if (rs6000_traceback_name)
1599 {
1600 if (! strncmp (rs6000_traceback_name, "full", 4))
1601 rs6000_traceback = traceback_full;
1602 else if (! strncmp (rs6000_traceback_name, "part", 4))
1603 rs6000_traceback = traceback_part;
1604 else if (! strncmp (rs6000_traceback_name, "no", 2))
1605 rs6000_traceback = traceback_none;
1606 else
9e637a26 1607 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1608 rs6000_traceback_name);
1609 }
1610
78f5898b
AH
1611 if (!rs6000_explicit_options.long_double)
1612 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1613
602ea4d3 1614#ifndef POWERPC_LINUX
d3603e8c 1615 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1616 rs6000_ieeequad = 1;
1617#endif
1618
0db747be
DE
1619 /* Enable Altivec ABI for AIX -maltivec. */
1620 if (TARGET_XCOFF && TARGET_ALTIVEC)
1621 rs6000_altivec_abi = 1;
1622
a2db2771
JJ
1623 /* The AltiVec ABI is the default for PowerPC-64 GNU/Linux. For
1624 PowerPC-32 GNU/Linux, -maltivec implies the AltiVec ABI. It can
1625 be explicitly overridden in either case. */
1626 if (TARGET_ELF)
6d0ef01e 1627 {
a2db2771
JJ
1628 if (!rs6000_explicit_options.altivec_abi
1629 && (TARGET_64BIT || TARGET_ALTIVEC))
1630 rs6000_altivec_abi = 1;
1631
1632 /* Enable VRSAVE for AltiVec ABI, unless explicitly overridden. */
1633 if (!rs6000_explicit_options.vrsave)
1634 TARGET_ALTIVEC_VRSAVE = rs6000_altivec_abi;
6d0ef01e
HP
1635 }
1636
594a51fe
SS
1637 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1638 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1639 {
1640 rs6000_darwin64_abi = 1;
9c7956fd 1641#if TARGET_MACHO
6ac49599 1642 darwin_one_byte_bool = 1;
9c7956fd 1643#endif
d9168963
SS
1644 /* Default to natural alignment, for better performance. */
1645 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1646 }
1647
194c524a
DE
1648 /* Place FP constants in the constant pool instead of TOC
1649 if section anchors enabled. */
1650 if (flag_section_anchors)
1651 TARGET_NO_FP_IN_TOC = 1;
1652
c4501e62
JJ
1653 /* Handle -mtls-size option. */
1654 rs6000_parse_tls_size_option ();
1655
a7ae18e2
AH
1656#ifdef SUBTARGET_OVERRIDE_OPTIONS
1657 SUBTARGET_OVERRIDE_OPTIONS;
1658#endif
1659#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1660 SUBSUBTARGET_OVERRIDE_OPTIONS;
1661#endif
4d4cbc0e
AH
1662#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1663 SUB3TARGET_OVERRIDE_OPTIONS;
1664#endif
a7ae18e2 1665
5da702b1
AH
1666 if (TARGET_E500)
1667 {
1668 /* The e500 does not have string instructions, and we set
1669 MASK_STRING above when optimizing for size. */
1670 if ((target_flags & MASK_STRING) != 0)
1671 target_flags = target_flags & ~MASK_STRING;
1672 }
1673 else if (rs6000_select[1].string != NULL)
1674 {
1675 /* For the powerpc-eabispe configuration, we set all these by
1676 default, so let's unset them if we manually set another
1677 CPU that is not the E500. */
a2db2771 1678 if (!rs6000_explicit_options.spe_abi)
5da702b1 1679 rs6000_spe_abi = 0;
78f5898b 1680 if (!rs6000_explicit_options.spe)
5da702b1 1681 rs6000_spe = 0;
78f5898b 1682 if (!rs6000_explicit_options.float_gprs)
5da702b1 1683 rs6000_float_gprs = 0;
78f5898b 1684 if (!rs6000_explicit_options.isel)
5da702b1
AH
1685 rs6000_isel = 0;
1686 }
b5044283 1687
eca0d5e8
JM
1688 /* Detect invalid option combinations with E500. */
1689 CHECK_E500_OPTIONS;
1690
ec507f2d 1691 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1692 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1693 && rs6000_cpu != PROCESSOR_POWER6
1694 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1695 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1696 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1697 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1698 || rs6000_cpu == PROCESSOR_POWER5
1699 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1700
ec507f2d
DE
1701 rs6000_sched_restricted_insns_priority
1702 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1703
569fa502 1704 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1705 rs6000_sched_costly_dep
1706 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1707
569fa502
DN
1708 if (rs6000_sched_costly_dep_str)
1709 {
f676971a 1710 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1711 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1712 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1713 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1714 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1715 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1716 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1717 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1718 else
c4ad648e 1719 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1720 }
1721
1722 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1723 rs6000_sched_insert_nops
1724 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1725
cbe26ab8
DN
1726 if (rs6000_sched_insert_nops_str)
1727 {
1728 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1729 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1730 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1731 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1732 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1733 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1734 else
c4ad648e 1735 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1736 }
1737
c81bebd7 1738#ifdef TARGET_REGNAMES
a4f6c312
SS
1739 /* If the user desires alternate register names, copy in the
1740 alternate names now. */
c81bebd7 1741 if (TARGET_REGNAMES)
4e135bdd 1742 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1743#endif
1744
df01da37 1745 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1746 If -maix-struct-return or -msvr4-struct-return was explicitly
1747 used, don't override with the ABI default. */
df01da37
DE
1748 if (!rs6000_explicit_options.aix_struct_ret)
1749 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1750
602ea4d3 1751 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1752 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1753
f676971a 1754 if (TARGET_TOC)
9ebbca7d 1755 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1756
301d03af
RS
1757 /* We can only guarantee the availability of DI pseudo-ops when
1758 assembling for 64-bit targets. */
ae6c1efd 1759 if (!TARGET_64BIT)
301d03af
RS
1760 {
1761 targetm.asm_out.aligned_op.di = NULL;
1762 targetm.asm_out.unaligned_op.di = NULL;
1763 }
1764
1494c534
DE
1765 /* Set branch target alignment, if not optimizing for size. */
1766 if (!optimize_size)
1767 {
d296e02e
AP
1768 /* Cell wants to be aligned to 8 bytes for dual issue. */
1769 if (rs6000_cpu == PROCESSOR_CELL)
1770 {
1771 if (align_functions <= 0)
1772 align_functions = 8;
1773 if (align_jumps <= 0)
1774 align_jumps = 8;
1775 if (align_loops <= 0)
1776 align_loops = 8;
1777 }
44cd321e 1778 if (rs6000_align_branch_targets)
1494c534
DE
1779 {
1780 if (align_functions <= 0)
1781 align_functions = 16;
1782 if (align_jumps <= 0)
1783 align_jumps = 16;
1784 if (align_loops <= 0)
1785 align_loops = 16;
1786 }
1787 if (align_jumps_max_skip <= 0)
1788 align_jumps_max_skip = 15;
1789 if (align_loops_max_skip <= 0)
1790 align_loops_max_skip = 15;
1791 }
2792d578 1792
71f123ca
FS
1793 /* Arrange to save and restore machine status around nested functions. */
1794 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1795
1796 /* We should always be splitting complex arguments, but we can't break
1797 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1798 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1799 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1800
1801 /* Initialize rs6000_cost with the appropriate target costs. */
1802 if (optimize_size)
1803 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1804 else
1805 switch (rs6000_cpu)
1806 {
1807 case PROCESSOR_RIOS1:
1808 rs6000_cost = &rios1_cost;
1809 break;
1810
1811 case PROCESSOR_RIOS2:
1812 rs6000_cost = &rios2_cost;
1813 break;
1814
1815 case PROCESSOR_RS64A:
1816 rs6000_cost = &rs64a_cost;
1817 break;
1818
1819 case PROCESSOR_MPCCORE:
1820 rs6000_cost = &mpccore_cost;
1821 break;
1822
1823 case PROCESSOR_PPC403:
1824 rs6000_cost = &ppc403_cost;
1825 break;
1826
1827 case PROCESSOR_PPC405:
1828 rs6000_cost = &ppc405_cost;
1829 break;
1830
1831 case PROCESSOR_PPC440:
1832 rs6000_cost = &ppc440_cost;
1833 break;
1834
1835 case PROCESSOR_PPC601:
1836 rs6000_cost = &ppc601_cost;
1837 break;
1838
1839 case PROCESSOR_PPC603:
1840 rs6000_cost = &ppc603_cost;
1841 break;
1842
1843 case PROCESSOR_PPC604:
1844 rs6000_cost = &ppc604_cost;
1845 break;
1846
1847 case PROCESSOR_PPC604e:
1848 rs6000_cost = &ppc604e_cost;
1849 break;
1850
1851 case PROCESSOR_PPC620:
8b897cfa
RS
1852 rs6000_cost = &ppc620_cost;
1853 break;
1854
f0517163
RS
1855 case PROCESSOR_PPC630:
1856 rs6000_cost = &ppc630_cost;
1857 break;
1858
982afe02 1859 case PROCESSOR_CELL:
d296e02e
AP
1860 rs6000_cost = &ppccell_cost;
1861 break;
1862
8b897cfa
RS
1863 case PROCESSOR_PPC750:
1864 case PROCESSOR_PPC7400:
1865 rs6000_cost = &ppc750_cost;
1866 break;
1867
1868 case PROCESSOR_PPC7450:
1869 rs6000_cost = &ppc7450_cost;
1870 break;
1871
1872 case PROCESSOR_PPC8540:
1873 rs6000_cost = &ppc8540_cost;
1874 break;
1875
fa41c305
EW
1876 case PROCESSOR_PPCE300C2:
1877 case PROCESSOR_PPCE300C3:
1878 rs6000_cost = &ppce300c2c3_cost;
1879 break;
1880
8b897cfa
RS
1881 case PROCESSOR_POWER4:
1882 case PROCESSOR_POWER5:
1883 rs6000_cost = &power4_cost;
1884 break;
1885
44cd321e
PS
1886 case PROCESSOR_POWER6:
1887 rs6000_cost = &power6_cost;
1888 break;
1889
8b897cfa 1890 default:
37409796 1891 gcc_unreachable ();
8b897cfa 1892 }
0b11da67
DE
1893
1894 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1895 set_param_value ("simultaneous-prefetches",
1896 rs6000_cost->simultaneous_prefetches);
1897 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1898 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1899 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1900 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1901 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1902 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
d7bd8aeb
JJ
1903
1904 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
1905 can be optimized to ap = __builtin_next_arg (0). */
1906 if (DEFAULT_ABI != ABI_V4)
1907 targetm.expand_builtin_va_start = NULL;
5248c961 1908}
5accd822 1909
7ccf35ed
DN
1910/* Implement targetm.vectorize.builtin_mask_for_load. */
1911static tree
1912rs6000_builtin_mask_for_load (void)
1913{
1914 if (TARGET_ALTIVEC)
1915 return altivec_builtin_mask_for_load;
1916 else
1917 return 0;
1918}
1919
f57d17f1
TM
1920/* Implement targetm.vectorize.builtin_conversion. */
1921static tree
1922rs6000_builtin_conversion (enum tree_code code, tree type)
1923{
1924 if (!TARGET_ALTIVEC)
1925 return NULL_TREE;
982afe02 1926
f57d17f1
TM
1927 switch (code)
1928 {
1929 case FLOAT_EXPR:
1930 switch (TYPE_MODE (type))
1931 {
1932 case V4SImode:
982afe02 1933 return TYPE_UNSIGNED (type) ?
f57d17f1
TM
1934 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX] :
1935 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
1936 default:
1937 return NULL_TREE;
1938 }
1939 default:
1940 return NULL_TREE;
1941 }
1942}
1943
89d67cca
DN
1944/* Implement targetm.vectorize.builtin_mul_widen_even. */
1945static tree
1946rs6000_builtin_mul_widen_even (tree type)
1947{
1948 if (!TARGET_ALTIVEC)
1949 return NULL_TREE;
1950
1951 switch (TYPE_MODE (type))
1952 {
1953 case V8HImode:
982afe02 1954 return TYPE_UNSIGNED (type) ?
89d67cca
DN
1955 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH] :
1956 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
1957
1958 case V16QImode:
1959 return TYPE_UNSIGNED (type) ?
1960 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB] :
1961 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
1962 default:
1963 return NULL_TREE;
1964 }
1965}
1966
1967/* Implement targetm.vectorize.builtin_mul_widen_odd. */
1968static tree
1969rs6000_builtin_mul_widen_odd (tree type)
1970{
1971 if (!TARGET_ALTIVEC)
1972 return NULL_TREE;
1973
1974 switch (TYPE_MODE (type))
1975 {
1976 case V8HImode:
1977 return TYPE_UNSIGNED (type) ?
1978 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH] :
1979 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
1980
1981 case V16QImode:
1982 return TYPE_UNSIGNED (type) ?
1983 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB] :
1984 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
1985 default:
1986 return NULL_TREE;
1987 }
1988}
1989
5b900a4c
DN
1990
1991/* Return true iff a data reference of TYPE can reach vector alignment (16)
1992 after applying N iterations. This routine does not determine
1993 how many iterations are required to reach the desired alignment. */
1994
1995static bool
3101faab 1996rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
1997{
1998 if (is_packed)
1999 return false;
2000
2001 if (TARGET_32BIT)
2002 {
2003 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
2004 return true;
2005
2006 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
2007 return true;
2008
2009 return false;
2010 }
2011 else
2012 {
2013 if (TARGET_MACHO)
2014 return false;
2015
2016 /* Assume that all other types are naturally aligned. CHECKME! */
2017 return true;
2018 }
2019}
2020
5da702b1
AH
2021/* Handle generic options of the form -mfoo=yes/no.
2022 NAME is the option name.
2023 VALUE is the option value.
2024 FLAG is the pointer to the flag where to store a 1 or 0, depending on
2025 whether the option value is 'yes' or 'no' respectively. */
993f19a8 2026static void
5da702b1 2027rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 2028{
5da702b1 2029 if (value == 0)
993f19a8 2030 return;
5da702b1
AH
2031 else if (!strcmp (value, "yes"))
2032 *flag = 1;
2033 else if (!strcmp (value, "no"))
2034 *flag = 0;
08b57fb3 2035 else
5da702b1 2036 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
2037}
2038
c4501e62
JJ
2039/* Validate and record the size specified with the -mtls-size option. */
2040
2041static void
863d938c 2042rs6000_parse_tls_size_option (void)
c4501e62
JJ
2043{
2044 if (rs6000_tls_size_string == 0)
2045 return;
2046 else if (strcmp (rs6000_tls_size_string, "16") == 0)
2047 rs6000_tls_size = 16;
2048 else if (strcmp (rs6000_tls_size_string, "32") == 0)
2049 rs6000_tls_size = 32;
2050 else if (strcmp (rs6000_tls_size_string, "64") == 0)
2051 rs6000_tls_size = 64;
2052 else
9e637a26 2053 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
2054}
2055
5accd822 2056void
a2369ed3 2057optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 2058{
2e3f0db6
DJ
2059 if (DEFAULT_ABI == ABI_DARWIN)
2060 /* The Darwin libraries never set errno, so we might as well
2061 avoid calling them when that's the only reason we would. */
2062 flag_errno_math = 0;
59d6560b
DE
2063
2064 /* Double growth factor to counter reduced min jump length. */
2065 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
2066
2067 /* Enable section anchors by default.
2068 Skip section anchors for Objective C and Objective C++
2069 until the front ends are fixed. */
23f99493 2070 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
194c524a 2071 flag_section_anchors = 1;
5accd822 2072}
78f5898b
AH
2073
2074/* Implement TARGET_HANDLE_OPTION. */
2075
2076static bool
2077rs6000_handle_option (size_t code, const char *arg, int value)
2078{
2079 switch (code)
2080 {
2081 case OPT_mno_power:
2082 target_flags &= ~(MASK_POWER | MASK_POWER2
2083 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2084 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2085 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2086 break;
2087 case OPT_mno_powerpc:
2088 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2089 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2090 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2091 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2092 break;
2093 case OPT_mfull_toc:
d2894ab5
DE
2094 target_flags &= ~MASK_MINIMAL_TOC;
2095 TARGET_NO_FP_IN_TOC = 0;
2096 TARGET_NO_SUM_IN_TOC = 0;
2097 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2098#ifdef TARGET_USES_SYSV4_OPT
2099 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
2100 just the same as -mminimal-toc. */
2101 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2102 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2103#endif
2104 break;
2105
2106#ifdef TARGET_USES_SYSV4_OPT
2107 case OPT_mtoc:
2108 /* Make -mtoc behave like -mminimal-toc. */
2109 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2110 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2111 break;
2112#endif
2113
2114#ifdef TARGET_USES_AIX64_OPT
2115 case OPT_maix64:
2116#else
2117 case OPT_m64:
2118#endif
2c9c9afd
AM
2119 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2120 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2121 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2122 break;
2123
2124#ifdef TARGET_USES_AIX64_OPT
2125 case OPT_maix32:
2126#else
2127 case OPT_m32:
2128#endif
2129 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2130 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2131 break;
2132
2133 case OPT_minsert_sched_nops_:
2134 rs6000_sched_insert_nops_str = arg;
2135 break;
2136
2137 case OPT_mminimal_toc:
2138 if (value == 1)
2139 {
d2894ab5
DE
2140 TARGET_NO_FP_IN_TOC = 0;
2141 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2142 }
2143 break;
2144
2145 case OPT_mpower:
2146 if (value == 1)
c2dba4ab
AH
2147 {
2148 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2149 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2150 }
78f5898b
AH
2151 break;
2152
2153 case OPT_mpower2:
2154 if (value == 1)
c2dba4ab
AH
2155 {
2156 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2157 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2158 }
78f5898b
AH
2159 break;
2160
2161 case OPT_mpowerpc_gpopt:
2162 case OPT_mpowerpc_gfxopt:
2163 if (value == 1)
c2dba4ab
AH
2164 {
2165 target_flags |= MASK_POWERPC;
2166 target_flags_explicit |= MASK_POWERPC;
2167 }
78f5898b
AH
2168 break;
2169
df01da37
DE
2170 case OPT_maix_struct_return:
2171 case OPT_msvr4_struct_return:
2172 rs6000_explicit_options.aix_struct_ret = true;
2173 break;
2174
78f5898b 2175 case OPT_mvrsave_:
a2db2771 2176 rs6000_explicit_options.vrsave = true;
78f5898b
AH
2177 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2178 break;
78f5898b
AH
2179
2180 case OPT_misel_:
2181 rs6000_explicit_options.isel = true;
2182 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2183 break;
2184
2185 case OPT_mspe_:
2186 rs6000_explicit_options.spe = true;
2187 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2188 break;
2189
2190 case OPT_mdebug_:
2191 rs6000_debug_name = arg;
2192 break;
2193
2194#ifdef TARGET_USES_SYSV4_OPT
2195 case OPT_mcall_:
2196 rs6000_abi_name = arg;
2197 break;
2198
2199 case OPT_msdata_:
2200 rs6000_sdata_name = arg;
2201 break;
2202
2203 case OPT_mtls_size_:
2204 rs6000_tls_size_string = arg;
2205 break;
2206
2207 case OPT_mrelocatable:
2208 if (value == 1)
c2dba4ab 2209 {
e0bf274f
AM
2210 target_flags |= MASK_MINIMAL_TOC;
2211 target_flags_explicit |= MASK_MINIMAL_TOC;
2212 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2213 }
78f5898b
AH
2214 break;
2215
2216 case OPT_mrelocatable_lib:
2217 if (value == 1)
c2dba4ab 2218 {
e0bf274f
AM
2219 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2220 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2221 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2222 }
78f5898b 2223 else
c2dba4ab
AH
2224 {
2225 target_flags &= ~MASK_RELOCATABLE;
2226 target_flags_explicit |= MASK_RELOCATABLE;
2227 }
78f5898b
AH
2228 break;
2229#endif
2230
2231 case OPT_mabi_:
78f5898b
AH
2232 if (!strcmp (arg, "altivec"))
2233 {
a2db2771 2234 rs6000_explicit_options.altivec_abi = true;
78f5898b 2235 rs6000_altivec_abi = 1;
a2db2771
JJ
2236
2237 /* Enabling the AltiVec ABI turns off the SPE ABI. */
78f5898b
AH
2238 rs6000_spe_abi = 0;
2239 }
2240 else if (! strcmp (arg, "no-altivec"))
d3603e8c 2241 {
a2db2771 2242 rs6000_explicit_options.altivec_abi = true;
d3603e8c
AM
2243 rs6000_altivec_abi = 0;
2244 }
78f5898b
AH
2245 else if (! strcmp (arg, "spe"))
2246 {
a2db2771 2247 rs6000_explicit_options.spe_abi = true;
78f5898b
AH
2248 rs6000_spe_abi = 1;
2249 rs6000_altivec_abi = 0;
2250 if (!TARGET_SPE_ABI)
2251 error ("not configured for ABI: '%s'", arg);
2252 }
2253 else if (! strcmp (arg, "no-spe"))
d3603e8c 2254 {
a2db2771 2255 rs6000_explicit_options.spe_abi = true;
d3603e8c
AM
2256 rs6000_spe_abi = 0;
2257 }
78f5898b
AH
2258
2259 /* These are here for testing during development only, do not
2260 document in the manual please. */
2261 else if (! strcmp (arg, "d64"))
2262 {
2263 rs6000_darwin64_abi = 1;
2264 warning (0, "Using darwin64 ABI");
2265 }
2266 else if (! strcmp (arg, "d32"))
2267 {
2268 rs6000_darwin64_abi = 0;
2269 warning (0, "Using old darwin ABI");
2270 }
2271
602ea4d3
JJ
2272 else if (! strcmp (arg, "ibmlongdouble"))
2273 {
d3603e8c 2274 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2275 rs6000_ieeequad = 0;
2276 warning (0, "Using IBM extended precision long double");
2277 }
2278 else if (! strcmp (arg, "ieeelongdouble"))
2279 {
d3603e8c 2280 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2281 rs6000_ieeequad = 1;
2282 warning (0, "Using IEEE extended precision long double");
2283 }
2284
78f5898b
AH
2285 else
2286 {
2287 error ("unknown ABI specified: '%s'", arg);
2288 return false;
2289 }
2290 break;
2291
2292 case OPT_mcpu_:
2293 rs6000_select[1].string = arg;
2294 break;
2295
2296 case OPT_mtune_:
2297 rs6000_select[2].string = arg;
2298 break;
2299
2300 case OPT_mtraceback_:
2301 rs6000_traceback_name = arg;
2302 break;
2303
2304 case OPT_mfloat_gprs_:
2305 rs6000_explicit_options.float_gprs = true;
2306 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2307 rs6000_float_gprs = 1;
2308 else if (! strcmp (arg, "double"))
2309 rs6000_float_gprs = 2;
2310 else if (! strcmp (arg, "no"))
2311 rs6000_float_gprs = 0;
2312 else
2313 {
2314 error ("invalid option for -mfloat-gprs: '%s'", arg);
2315 return false;
2316 }
2317 break;
2318
2319 case OPT_mlong_double_:
2320 rs6000_explicit_options.long_double = true;
2321 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2322 if (value != 64 && value != 128)
2323 {
2324 error ("Unknown switch -mlong-double-%s", arg);
2325 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2326 return false;
2327 }
2328 else
2329 rs6000_long_double_type_size = value;
2330 break;
2331
2332 case OPT_msched_costly_dep_:
2333 rs6000_sched_costly_dep_str = arg;
2334 break;
2335
2336 case OPT_malign_:
2337 rs6000_explicit_options.alignment = true;
2338 if (! strcmp (arg, "power"))
2339 {
2340 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2341 some C library functions, so warn about it. The flag may be
2342 useful for performance studies from time to time though, so
2343 don't disable it entirely. */
2344 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2345 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2346 " it is incompatible with the installed C and C++ libraries");
2347 rs6000_alignment_flags = MASK_ALIGN_POWER;
2348 }
2349 else if (! strcmp (arg, "natural"))
2350 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2351 else
2352 {
2353 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2354 return false;
2355 }
2356 break;
2357 }
2358 return true;
2359}
3cfa4909
MM
2360\f
2361/* Do anything needed at the start of the asm file. */
2362
1bc7c5b6 2363static void
863d938c 2364rs6000_file_start (void)
3cfa4909 2365{
c4d38ccb 2366 size_t i;
3cfa4909 2367 char buffer[80];
d330fd93 2368 const char *start = buffer;
3cfa4909 2369 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2370 const char *default_cpu = TARGET_CPU_DEFAULT;
2371 FILE *file = asm_out_file;
2372
2373 default_file_start ();
2374
2375#ifdef TARGET_BI_ARCH
2376 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2377 default_cpu = 0;
2378#endif
3cfa4909
MM
2379
2380 if (flag_verbose_asm)
2381 {
2382 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2383 rs6000_select[0].string = default_cpu;
2384
b6a1cbae 2385 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2386 {
2387 ptr = &rs6000_select[i];
2388 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2389 {
2390 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2391 start = "";
2392 }
2393 }
2394
9c6b4ed9 2395 if (PPC405_ERRATUM77)
b0bfee6e 2396 {
9c6b4ed9 2397 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2398 start = "";
2399 }
b0bfee6e 2400
b91da81f 2401#ifdef USING_ELFOS_H
3cfa4909
MM
2402 switch (rs6000_sdata)
2403 {
2404 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2405 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2406 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2407 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2408 }
2409
2410 if (rs6000_sdata && g_switch_value)
2411 {
307b599c
MK
2412 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2413 g_switch_value);
3cfa4909
MM
2414 start = "";
2415 }
2416#endif
2417
2418 if (*start == '\0')
949ea356 2419 putc ('\n', file);
3cfa4909 2420 }
b723e82f 2421
e51917ae
JM
2422#ifdef HAVE_AS_GNU_ATTRIBUTE
2423 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2424 {
2425 fprintf (file, "\t.gnu_attribute 4, %d\n",
2426 (TARGET_HARD_FLOAT && TARGET_FPRS) ? 1 : 2);
2427 fprintf (file, "\t.gnu_attribute 8, %d\n",
2428 (TARGET_ALTIVEC_ABI ? 2
2429 : TARGET_SPE_ABI ? 3
2430 : 1));
2431 }
e51917ae
JM
2432#endif
2433
b723e82f
JJ
2434 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2435 {
d6b5193b
RS
2436 switch_to_section (toc_section);
2437 switch_to_section (text_section);
b723e82f 2438 }
3cfa4909 2439}
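/* Illustrative example, not part of the original file: for a 32-bit SysV
   (ABI_V4) hard-float target using neither the AltiVec nor the SPE ABI, the
   HAVE_AS_GNU_ATTRIBUTE block above emits assembly along the lines of

	.gnu_attribute 4, 1
	.gnu_attribute 8, 1

   i.e. 1 for tag 4 (hard float, rather than 2 for soft float) and 1 for
   tag 8 (neither 2 for the AltiVec ABI nor 3 for the SPE ABI), exactly as
   the two fprintf calls above compute.  */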
c4e18b1c 2440
5248c961 2441\f
a0ab749a 2442/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2443
2444int
863d938c 2445direct_return (void)
9878760c 2446{
4697a36c
MM
2447 if (reload_completed)
2448 {
2449 rs6000_stack_t *info = rs6000_stack_info ();
2450
2451 if (info->first_gp_reg_save == 32
2452 && info->first_fp_reg_save == 64
00b960c7 2453 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2454 && ! info->lr_save_p
2455 && ! info->cr_save_p
00b960c7 2456 && info->vrsave_mask == 0
c81fc13e 2457 && ! info->push_p)
4697a36c
MM
2458 return 1;
2459 }
2460
2461 return 0;
9878760c
RK
2462}
2463
4e74d8ec
MM
2464/* Return the number of instructions it takes to form a constant in an
2465 integer register. */
2466
48d72335 2467int
a2369ed3 2468num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2469{
2470 /* signed constant loadable with {cal|addi} */
547b216d 2471 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2472 return 1;
2473
4e74d8ec 2474 /* constant loadable with {cau|addis} */
547b216d
DE
2475 else if ((value & 0xffff) == 0
2476 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2477 return 1;
2478
5f59ecb7 2479#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2480 else if (TARGET_POWERPC64)
4e74d8ec 2481 {
a65c591c
DE
2482 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2483 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2484
a65c591c 2485 if (high == 0 || high == -1)
4e74d8ec
MM
2486 return 2;
2487
a65c591c 2488 high >>= 1;
4e74d8ec 2489
a65c591c 2490 if (low == 0)
4e74d8ec 2491 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2492 else
2493 return (num_insns_constant_wide (high)
e396202a 2494 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2495 }
2496#endif
2497
2498 else
2499 return 2;
2500}
2501
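/* Worked example, not part of the original file: for the constant 0x12345
   neither test above matches -- it does not fit in a signed 16-bit
   immediate, and its low 16 bits are nonzero -- so whichever path is taken
   the function returns 2: typically a lis/addis of the upper halfword
   followed by an ori of the lower one.  */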
2502int
a2369ed3 2503num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2504{
37409796 2505 HOST_WIDE_INT low, high;
bb8df8a6 2506
37409796 2507 switch (GET_CODE (op))
0d30d435 2508 {
37409796 2509 case CONST_INT:
0d30d435 2510#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2511 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2512 && mask64_operand (op, mode))
c4ad648e 2513 return 2;
0d30d435
DE
2514 else
2515#endif
2516 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2517
37409796 2518 case CONST_DOUBLE:
e41b2a33 2519 if (mode == SFmode || mode == SDmode)
37409796
NS
2520 {
2521 long l;
2522 REAL_VALUE_TYPE rv;
bb8df8a6 2523
37409796 2524 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
e41b2a33
PB
2525 if (DECIMAL_FLOAT_MODE_P (mode))
2526 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
2527 else
2528 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
37409796
NS
2529 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2530 }
a260abc9 2531
37409796
NS
2532 if (mode == VOIDmode || mode == DImode)
2533 {
2534 high = CONST_DOUBLE_HIGH (op);
2535 low = CONST_DOUBLE_LOW (op);
2536 }
2537 else
2538 {
2539 long l[2];
2540 REAL_VALUE_TYPE rv;
bb8df8a6 2541
37409796 2542 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2543 if (DECIMAL_FLOAT_MODE_P (mode))
2544 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2545 else
2546 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2547 high = l[WORDS_BIG_ENDIAN == 0];
2548 low = l[WORDS_BIG_ENDIAN != 0];
2549 }
47ad8c61 2550
37409796
NS
2551 if (TARGET_32BIT)
2552 return (num_insns_constant_wide (low)
2553 + num_insns_constant_wide (high));
2554 else
2555 {
2556 if ((high == 0 && low >= 0)
2557 || (high == -1 && low < 0))
2558 return num_insns_constant_wide (low);
bb8df8a6 2559
1990cd79 2560 else if (mask64_operand (op, mode))
37409796 2561 return 2;
bb8df8a6 2562
37409796
NS
2563 else if (low == 0)
2564 return num_insns_constant_wide (high) + 1;
bb8df8a6 2565
37409796
NS
2566 else
2567 return (num_insns_constant_wide (high)
2568 + num_insns_constant_wide (low) + 1);
2569 }
bb8df8a6 2570
37409796
NS
2571 default:
2572 gcc_unreachable ();
4e74d8ec 2573 }
4e74d8ec
MM
2574}
2575
0972012c
RS
2576/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2577 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2578 corresponding element of the vector, but for V4SFmode and V2SFmode,
2579 the corresponding "float" is interpreted as an SImode integer. */
2580
2581static HOST_WIDE_INT
2582const_vector_elt_as_int (rtx op, unsigned int elt)
2583{
2584 rtx tmp = CONST_VECTOR_ELT (op, elt);
2585 if (GET_MODE (op) == V4SFmode
2586 || GET_MODE (op) == V2SFmode)
2587 tmp = gen_lowpart (SImode, tmp);
2588 return INTVAL (tmp);
2589}
452a7d36 2590
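/* Example, not part of the original file: for a V4SFmode CONST_VECTOR whose
   element 0 is the float constant 1.0, the gen_lowpart above reinterprets
   the IEEE single-precision bit pattern, so this function returns
   0x3f800000 rather than converting the value to the integer 1.  */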
77ccdfed 2591/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2592 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2593 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2594 all items are set to the same value and contain COPIES replicas of the
2595 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2596 operand and the others are set to the value of the operand's msb. */
2597
2598static bool
2599vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2600{
66180ff3
PB
2601 enum machine_mode mode = GET_MODE (op);
2602 enum machine_mode inner = GET_MODE_INNER (mode);
2603
2604 unsigned i;
2605 unsigned nunits = GET_MODE_NUNITS (mode);
2606 unsigned bitsize = GET_MODE_BITSIZE (inner);
2607 unsigned mask = GET_MODE_MASK (inner);
2608
0972012c 2609 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2610 HOST_WIDE_INT splat_val = val;
2611 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2612
2613 /* Construct the value to be splatted, if possible. If not, return 0. */
2614 for (i = 2; i <= copies; i *= 2)
452a7d36 2615 {
66180ff3
PB
2616 HOST_WIDE_INT small_val;
2617 bitsize /= 2;
2618 small_val = splat_val >> bitsize;
2619 mask >>= bitsize;
2620 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2621 return false;
2622 splat_val = small_val;
2623 }
c4ad648e 2624
66180ff3
PB
2625 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2626 if (EASY_VECTOR_15 (splat_val))
2627 ;
2628
2629 /* Also check if we can splat, and then add the result to itself. Do so if
2630 the value is positive, or if the splat instruction is using OP's mode;
2631 for splat_val < 0, the splat and the add should use the same mode. */
2632 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2633 && (splat_val >= 0 || (step == 1 && copies == 1)))
2634 ;
2635
2636 else
2637 return false;
2638
2639 /* Check if VAL is present in every STEP-th element, and the
2640 other elements are filled with its most significant bit. */
2641 for (i = 0; i < nunits - 1; ++i)
2642 {
2643 HOST_WIDE_INT desired_val;
2644 if (((i + 1) & (step - 1)) == 0)
2645 desired_val = val;
2646 else
2647 desired_val = msb_val;
2648
0972012c 2649 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2650 return false;
452a7d36 2651 }
66180ff3
PB
2652
2653 return true;
452a7d36
HP
2654}
2655
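/* Worked examples, not part of the original file:

   STEP: the V8HImode vector { 0, 5, 0, 5, 0, 5, 0, 5 } satisfies
   vspltis_constant (op, 2, 1): every second halfword equals the last
   element (5) and the others equal its most significant bit (0), so the
   vector is exactly what a vspltisw with immediate 5 produces.

   COPIES: the V8HImode vector with every halfword equal to 0x0303
   satisfies vspltis_constant (op, 1, 2): each halfword is two replicas of
   the byte 3, so the vector is what a vspltisb with immediate 3 produces.  */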
69ef87e2 2656
77ccdfed 2657/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2658 with a vspltisb, vspltish or vspltisw. */
2659
2660bool
2661easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2662{
66180ff3 2663 unsigned step, copies;
d744e06e 2664
66180ff3
PB
2665 if (mode == VOIDmode)
2666 mode = GET_MODE (op);
2667 else if (mode != GET_MODE (op))
2668 return false;
d744e06e 2669
66180ff3
PB
2670 /* Start with a vspltisw. */
2671 step = GET_MODE_NUNITS (mode) / 4;
2672 copies = 1;
2673
2674 if (vspltis_constant (op, step, copies))
2675 return true;
2676
2677 /* Then try with a vspltish. */
2678 if (step == 1)
2679 copies <<= 1;
2680 else
2681 step >>= 1;
2682
2683 if (vspltis_constant (op, step, copies))
2684 return true;
2685
2686 /* And finally a vspltisb. */
2687 if (step == 1)
2688 copies <<= 1;
2689 else
2690 step >>= 1;
2691
2692 if (vspltis_constant (op, step, copies))
2693 return true;
2694
2695 return false;
d744e06e
AH
2696}
2697
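/* Illustrative trace, not part of the original file: for a V16QImode vector
   with every byte equal to 3, the first attempt above (step 4, copies 1,
   i.e. a vspltisw) and the second (step 2, copies 1, i.e. a vspltish) both
   fail, because they would need the bytes between the splat positions to
   equal the most significant bit (0); the final attempt (step 1, copies 1,
   i.e. a vspltisb with immediate 3) succeeds, so the function returns
   true.  */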
66180ff3
PB
2698/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2699 result is OP. Abort if it is not possible. */
d744e06e 2700
f676971a 2701rtx
66180ff3 2702gen_easy_altivec_constant (rtx op)
452a7d36 2703{
66180ff3
PB
2704 enum machine_mode mode = GET_MODE (op);
2705 int nunits = GET_MODE_NUNITS (mode);
2706 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2707 unsigned step = nunits / 4;
2708 unsigned copies = 1;
2709
2710 /* Start with a vspltisw. */
2711 if (vspltis_constant (op, step, copies))
2712 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2713
2714 /* Then try with a vspltish. */
2715 if (step == 1)
2716 copies <<= 1;
2717 else
2718 step >>= 1;
2719
2720 if (vspltis_constant (op, step, copies))
2721 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2722
2723 /* And finally a vspltisb. */
2724 if (step == 1)
2725 copies <<= 1;
2726 else
2727 step >>= 1;
2728
2729 if (vspltis_constant (op, step, copies))
2730 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2731
2732 gcc_unreachable ();
d744e06e
AH
2733}
2734
2735const char *
a2369ed3 2736output_vec_const_move (rtx *operands)
d744e06e
AH
2737{
2738 int cst, cst2;
2739 enum machine_mode mode;
2740 rtx dest, vec;
2741
2742 dest = operands[0];
2743 vec = operands[1];
d744e06e 2744 mode = GET_MODE (dest);
69ef87e2 2745
d744e06e
AH
2746 if (TARGET_ALTIVEC)
2747 {
66180ff3 2748 rtx splat_vec;
d744e06e
AH
2749 if (zero_constant (vec, mode))
2750 return "vxor %0,%0,%0";
37409796 2751
66180ff3
PB
2752 splat_vec = gen_easy_altivec_constant (vec);
2753 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2754 operands[1] = XEXP (splat_vec, 0);
2755 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2756 return "#";
bb8df8a6 2757
66180ff3 2758 switch (GET_MODE (splat_vec))
98ef3137 2759 {
37409796 2760 case V4SImode:
66180ff3 2761 return "vspltisw %0,%1";
c4ad648e 2762
37409796 2763 case V8HImode:
66180ff3 2764 return "vspltish %0,%1";
c4ad648e 2765
37409796 2766 case V16QImode:
66180ff3 2767 return "vspltisb %0,%1";
bb8df8a6 2768
37409796
NS
2769 default:
2770 gcc_unreachable ();
98ef3137 2771 }
69ef87e2
AH
2772 }
2773
37409796 2774 gcc_assert (TARGET_SPE);
bb8df8a6 2775
37409796
NS
2776 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2777 pattern of V1DI, V4HI, and V2SF.
2778
2779 FIXME: We should probably return # and add post reload
2780 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
2781 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2782 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2783 operands[1] = CONST_VECTOR_ELT (vec, 0);
2784 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2785 if (cst == cst2)
2786 return "li %0,%1\n\tevmergelo %0,%0,%0";
2787 else
2788 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2789}
2790
f5027409
RE
2791/* Initialize TARGET of vector PAIRED to VALS. */
2792
2793void
2794paired_expand_vector_init (rtx target, rtx vals)
2795{
2796 enum machine_mode mode = GET_MODE (target);
2797 int n_elts = GET_MODE_NUNITS (mode);
2798 int n_var = 0;
2799 rtx x, new, tmp, constant_op, op1, op2;
2800 int i;
2801
2802 for (i = 0; i < n_elts; ++i)
2803 {
2804 x = XVECEXP (vals, 0, i);
2805 if (!CONSTANT_P (x))
2806 ++n_var;
2807 }
2808 if (n_var == 0)
2809 {
2810 /* Load from constant pool. */
2811 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2812 return;
2813 }
2814
2815 if (n_var == 2)
2816 {
2817 /* The vector is initialized only with non-constants. */
2818 new = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
2819 XVECEXP (vals, 0, 1));
2820
2821 emit_move_insn (target, new);
2822 return;
2823 }
2824
2825 /* One field is non-constant and the other one is a constant. Load the
2826 constant from the constant pool and use ps_merge instruction to
2827 construct the whole vector. */
2828 op1 = XVECEXP (vals, 0, 0);
2829 op2 = XVECEXP (vals, 0, 1);
2830
2831 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
2832
2833 tmp = gen_reg_rtx (GET_MODE (constant_op));
2834 emit_move_insn (tmp, constant_op);
2835
2836 if (CONSTANT_P (op1))
2837 new = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
2838 else
2839 new = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
2840
2841 emit_move_insn (target, new);
2842}
2843
e2e95f45
RE
2844void
2845paired_expand_vector_move (rtx operands[])
2846{
2847 rtx op0 = operands[0], op1 = operands[1];
2848
2849 emit_move_insn (op0, op1);
2850}
2851
2852/* Emit vector compare for code RCODE. DEST is destination, OP1 and
2853 OP2 are two VEC_COND_EXPR operands, CC_OP0 and CC_OP1 are the two
2854 operands for the relation operation RCODE. This is a recursive
2855 function. */
2856
2857static void
2858paired_emit_vector_compare (enum rtx_code rcode,
2859 rtx dest, rtx op0, rtx op1,
2860 rtx cc_op0, rtx cc_op1)
2861{
2862 rtx tmp = gen_reg_rtx (V2SFmode);
2863 rtx tmp1, max, min, equal_zero;
2864
2865 gcc_assert (TARGET_PAIRED_FLOAT);
2866 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
2867
2868 switch (rcode)
2869 {
2870 case LT:
2871 case LTU:
2872 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2873 return;
2874 case GE:
2875 case GEU:
2876 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2877 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
2878 return;
2879 case LE:
2880 case LEU:
2881 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
2882 return;
2883 case GT:
2884 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2885 return;
2886 case EQ:
2887 tmp1 = gen_reg_rtx (V2SFmode);
2888 max = gen_reg_rtx (V2SFmode);
2889 min = gen_reg_rtx (V2SFmode);
2890 equal_zero = gen_reg_rtx (V2SFmode);
2891
2892 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2893 emit_insn (gen_selv2sf4
2894 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2895 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
2896 emit_insn (gen_selv2sf4
2897 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2898 emit_insn (gen_subv2sf3 (tmp1, min, max));
2899 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
2900 return;
2901 case NE:
2902 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
2903 return;
2904 case UNLE:
2905 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2906 return;
2907 case UNLT:
2908 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
2909 return;
2910 case UNGE:
2911 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2912 return;
2913 case UNGT:
2914 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
2915 return;
2916 default:
2917 gcc_unreachable ();
2918 }
2919
2920 return;
2921}
2922
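/* Illustrative note, not part of the original file: the recursion above
   funnels every comparison into the GE case (and, for EQ/NE, a pair of
   GE-style selects), which is the only form emitted directly via
   gen_subv2sf3/gen_selv2sf4.  For instance

     GT:  dest = GT (cc_op0, cc_op1) ? op0 : op1

   is rewritten as LE with the select operands swapped, and LE in turn as GE
   with the comparison operands swapped, so the call that finally emits code
   is the GE case with (op1, op0, cc_op1, cc_op0).  */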
2923/* Emit vector conditional expression.
2924 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
2925 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
2926
2927int
2928paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
2929 rtx cond, rtx cc_op0, rtx cc_op1)
2930{
2931 enum rtx_code rcode = GET_CODE (cond);
2932
2933 if (!TARGET_PAIRED_FLOAT)
2934 return 0;
2935
2936 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
2937
2938 return 1;
2939}
2940
7a4eca66
DE
2941/* Initialize vector TARGET to VALS. */
2942
2943void
2944rs6000_expand_vector_init (rtx target, rtx vals)
2945{
2946 enum machine_mode mode = GET_MODE (target);
2947 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2948 int n_elts = GET_MODE_NUNITS (mode);
2949 int n_var = 0, one_var = -1;
2950 bool all_same = true, all_const_zero = true;
2951 rtx x, mem;
2952 int i;
2953
2954 for (i = 0; i < n_elts; ++i)
2955 {
2956 x = XVECEXP (vals, 0, i);
2957 if (!CONSTANT_P (x))
2958 ++n_var, one_var = i;
2959 else if (x != CONST0_RTX (inner_mode))
2960 all_const_zero = false;
2961
2962 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2963 all_same = false;
2964 }
2965
2966 if (n_var == 0)
2967 {
501fb355 2968 rtx const_vec = gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0));
7a4eca66
DE
2969 if (mode != V4SFmode && all_const_zero)
2970 {
2971 /* Zero register. */
2972 emit_insn (gen_rtx_SET (VOIDmode, target,
2973 gen_rtx_XOR (mode, target, target)));
2974 return;
2975 }
501fb355 2976 else if (mode != V4SFmode && easy_vector_constant (const_vec, mode))
7a4eca66
DE
2977 {
2978 /* Splat immediate. */
501fb355 2979 emit_insn (gen_rtx_SET (VOIDmode, target, const_vec));
7a4eca66
DE
2980 return;
2981 }
2982 else if (all_same)
2983 ; /* Splat vector element. */
2984 else
2985 {
2986 /* Load from constant pool. */
501fb355 2987 emit_move_insn (target, const_vec);
7a4eca66
DE
2988 return;
2989 }
2990 }
2991
2992 /* Store value to stack temp. Load vector element. Splat. */
2993 if (all_same)
2994 {
2995 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2996 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2997 XVECEXP (vals, 0, 0));
2998 x = gen_rtx_UNSPEC (VOIDmode,
2999 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3000 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3001 gen_rtvec (2,
3002 gen_rtx_SET (VOIDmode,
3003 target, mem),
3004 x)));
3005 x = gen_rtx_VEC_SELECT (inner_mode, target,
3006 gen_rtx_PARALLEL (VOIDmode,
3007 gen_rtvec (1, const0_rtx)));
3008 emit_insn (gen_rtx_SET (VOIDmode, target,
3009 gen_rtx_VEC_DUPLICATE (mode, x)));
3010 return;
3011 }
3012
3013 /* One field is non-constant. Load constant then overwrite
3014 varying field. */
3015 if (n_var == 1)
3016 {
3017 rtx copy = copy_rtx (vals);
3018
57b51d4d 3019 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
3020 varying element. */
3021 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
3022 rs6000_expand_vector_init (target, copy);
3023
3024 /* Insert variable. */
3025 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
3026 return;
3027 }
3028
3029 /* Construct the vector in memory one field at a time
3030 and load the whole vector. */
3031 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3032 for (i = 0; i < n_elts; i++)
3033 emit_move_insn (adjust_address_nv (mem, inner_mode,
3034 i * GET_MODE_SIZE (inner_mode)),
3035 XVECEXP (vals, 0, i));
3036 emit_move_insn (target, mem);
3037}
3038
3039/* Set field ELT of TARGET to VAL. */
3040
3041void
3042rs6000_expand_vector_set (rtx target, rtx val, int elt)
3043{
3044 enum machine_mode mode = GET_MODE (target);
3045 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3046 rtx reg = gen_reg_rtx (mode);
3047 rtx mask, mem, x;
3048 int width = GET_MODE_SIZE (inner_mode);
3049 int i;
3050
3051 /* Load single variable value. */
3052 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3053 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
3054 x = gen_rtx_UNSPEC (VOIDmode,
3055 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3056 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3057 gen_rtvec (2,
3058 gen_rtx_SET (VOIDmode,
3059 reg, mem),
3060 x)));
3061
3062 /* Linear sequence. */
3063 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
3064 for (i = 0; i < 16; ++i)
3065 XVECEXP (mask, 0, i) = GEN_INT (i);
3066
3067 /* Set permute mask to insert element into target. */
3068 for (i = 0; i < width; ++i)
3069 XVECEXP (mask, 0, elt*width + i)
3070 = GEN_INT (i + 0x10);
3071 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3072 x = gen_rtx_UNSPEC (mode,
3073 gen_rtvec (3, target, reg,
3074 force_reg (V16QImode, x)),
3075 UNSPEC_VPERM);
3076 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3077}
3078
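/* Worked example, not part of the original file: for a V4SImode TARGET and
   ELT == 2, width is 4, so the permute mask built above becomes

     { 0, 1, 2, 3, 4, 5, 6, 7, 0x10, 0x11, 0x12, 0x13, 12, 13, 14, 15 }

   i.e. bytes 8..11 of the result are taken from the first four bytes of REG
   (the value just loaded from the stack slot) and all other bytes are
   copied unchanged from TARGET.  */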
3079/* Extract field ELT from VEC into TARGET. */
3080
3081void
3082rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3083{
3084 enum machine_mode mode = GET_MODE (vec);
3085 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3086 rtx mem, x;
3087
3088 /* Allocate mode-sized buffer. */
3089 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3090
3091 /* Add offset to field within buffer matching vector element. */
3092 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3093
3094 /* Store single field into mode-sized buffer. */
3095 x = gen_rtx_UNSPEC (VOIDmode,
3096 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3097 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3098 gen_rtvec (2,
3099 gen_rtx_SET (VOIDmode,
3100 mem, vec),
3101 x)));
3102 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3103}
3104
0ba1b2ff
AM
3105/* Generates shifts and masks for a pair of rldicl or rldicr insns to
3106 implement ANDing by the mask IN. */
3107void
a2369ed3 3108build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
3109{
3110#if HOST_BITS_PER_WIDE_INT >= 64
3111 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3112 int shift;
3113
37409796 3114 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
3115
3116 c = INTVAL (in);
3117 if (c & 1)
3118 {
3119 /* Assume c initially something like 0x00fff000000fffff. The idea
3120 is to rotate the word so that the middle ^^^^^^ group of zeros
3121 is at the MS end and can be cleared with an rldicl mask. We then
3122 rotate back and clear off the MS ^^ group of zeros with a
3123 second rldicl. */
3124 c = ~c; /* c == 0xff000ffffff00000 */
3125 lsb = c & -c; /* lsb == 0x0000000000100000 */
3126 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3127 c = ~c; /* c == 0x00fff000000fffff */
3128 c &= -lsb; /* c == 0x00fff00000000000 */
3129 lsb = c & -c; /* lsb == 0x0000100000000000 */
3130 c = ~c; /* c == 0xff000fffffffffff */
3131 c &= -lsb; /* c == 0xff00000000000000 */
3132 shift = 0;
3133 while ((lsb >>= 1) != 0)
3134 shift++; /* shift == 44 on exit from loop */
3135 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3136 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3137 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
3138 }
3139 else
0ba1b2ff
AM
3140 {
3141 /* Assume c initially something like 0xff000f0000000000. The idea
3142 is to rotate the word so that the ^^^ middle group of zeros
3143 is at the LS end and can be cleared with an rldicr mask. We then
3144 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3145 a second rldicr. */
3146 lsb = c & -c; /* lsb == 0x0000010000000000 */
3147 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3148 c = ~c; /* c == 0x00fff0ffffffffff */
3149 c &= -lsb; /* c == 0x00fff00000000000 */
3150 lsb = c & -c; /* lsb == 0x0000100000000000 */
3151 c = ~c; /* c == 0xff000fffffffffff */
3152 c &= -lsb; /* c == 0xff00000000000000 */
3153 shift = 0;
3154 while ((lsb >>= 1) != 0)
3155 shift++; /* shift == 44 on exit from loop */
3156 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3157 m1 >>= shift; /* m1 == 0x0000000000000fff */
3158 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3159 }
3160
3161 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3162 masks will be all 1's. We are guaranteed more than one transition. */
3163 out[0] = GEN_INT (64 - shift);
3164 out[1] = GEN_INT (m1);
3165 out[2] = GEN_INT (shift);
3166 out[3] = GEN_INT (m2);
3167#else
045572c7
GK
3168 (void)in;
3169 (void)out;
37409796 3170 gcc_unreachable ();
0ba1b2ff 3171#endif
a260abc9
DE
3172}
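
/* Worked example (illustrative, not part of the original source): for
   IN == 0x00fff000000fffff the c & 1 branch above yields

     out[0] = GEN_INT (20)                   first rotate count (64 - 44)
     out[1] = GEN_INT (0x000000ffffffffff)   first mask
     out[2] = GEN_INT (44)                   second rotate count
     out[3] = GEN_INT (0x00ffffffffffffff)   second mask

   i.e. rotate left by 20 and mask with out[1] (one rldicl), then rotate
   left by 44 -- back to the original alignment -- and mask with out[3]
   (a second rldicl), which together implement the AND.  */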
3173
54b695e7 3174/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
3175
3176bool
54b695e7
AH
3177invalid_e500_subreg (rtx op, enum machine_mode mode)
3178{
61c76239
JM
3179 if (TARGET_E500_DOUBLE)
3180 {
17caeff2
JM
3181 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
3182 subreg:TI and reg:TF. */
61c76239 3183 if (GET_CODE (op) == SUBREG
17caeff2 3184 && (mode == SImode || mode == DImode || mode == TImode)
61c76239 3185 && REG_P (SUBREG_REG (op))
17caeff2 3186 && (GET_MODE (SUBREG_REG (op)) == DFmode
4d4447b5
PB
3187 || GET_MODE (SUBREG_REG (op)) == TFmode
3188 || GET_MODE (SUBREG_REG (op)) == DDmode
3189 || GET_MODE (SUBREG_REG (op)) == TDmode))
61c76239
JM
3190 return true;
3191
17caeff2
JM
3192 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3193 reg:TI. */
61c76239 3194 if (GET_CODE (op) == SUBREG
4d4447b5
PB
3195 && (mode == DFmode || mode == TFmode
3196 || mode == DDmode || mode == TDmode)
61c76239 3197 && REG_P (SUBREG_REG (op))
17caeff2
JM
3198 && (GET_MODE (SUBREG_REG (op)) == DImode
3199 || GET_MODE (SUBREG_REG (op)) == TImode))
61c76239
JM
3200 return true;
3201 }
54b695e7 3202
61c76239
JM
3203 if (TARGET_SPE
3204 && GET_CODE (op) == SUBREG
3205 && mode == SImode
54b695e7 3206 && REG_P (SUBREG_REG (op))
14502dad 3207 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
3208 return true;
3209
3210 return false;
3211}
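
/* Illustrative examples (not part of the original source) of subregs
   rejected above when TARGET_E500_DOUBLE is set:

     (subreg:SI (reg:DF 123) 0)   SI/DI/TI subreg of a DF/TF/DD/TD register
     (subreg:DF (reg:DI 124) 0)   DF/TF/DD/TD subreg of a DI/TI register

   and, for TARGET_SPE, an SImode subreg of an SPE vector register such as
   (subreg:SI (reg:V2SI 125) 0).  */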
3212
58182de3 3213/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
3214 field is an FP double while the FP fields remain word aligned. */
3215
19d66194 3216unsigned int
fa5b0972
AM
3217rs6000_special_round_type_align (tree type, unsigned int computed,
3218 unsigned int specified)
95727fb8 3219{
fa5b0972 3220 unsigned int align = MAX (computed, specified);
95727fb8 3221 tree field = TYPE_FIELDS (type);
95727fb8 3222
bb8df8a6 3223	  /* Skip all non-field decls.  */
85962ac8 3224 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
3225 field = TREE_CHAIN (field);
3226
fa5b0972
AM
3227 if (field != NULL && field != type)
3228 {
3229 type = TREE_TYPE (field);
3230 while (TREE_CODE (type) == ARRAY_TYPE)
3231 type = TREE_TYPE (type);
3232
3233 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3234 align = MAX (align, 64);
3235 }
95727fb8 3236
fa5b0972 3237 return align;
95727fb8
AP
3238}
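
/* Illustrative example (not part of the original source), assuming the AIX
   alignment rules described above:

     struct s1 { double d; int i; };   record alignment raised to 64 bits
     struct s2 { int i; double d; };   record alignment left at its
                                       computed value

   Only the alignment of the record as a whole is raised; a double that is
   not the first field, such as the one in s2, stays word aligned.  */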
3239
58182de3
GK
3240/* Darwin increases record alignment to the natural alignment of
3241 the first field. */
3242
3243unsigned int
3244darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3245 unsigned int specified)
3246{
3247 unsigned int align = MAX (computed, specified);
3248
3249 if (TYPE_PACKED (type))
3250 return align;
3251
3252 /* Find the first field, looking down into aggregates. */
3253 do {
3254 tree field = TYPE_FIELDS (type);
 3255	    /* Skip all non-field decls.  */
3256 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3257 field = TREE_CHAIN (field);
3258 if (! field)
3259 break;
3260 type = TREE_TYPE (field);
3261 while (TREE_CODE (type) == ARRAY_TYPE)
3262 type = TREE_TYPE (type);
3263 } while (AGGREGATE_TYPE_P (type));
3264
3265 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3266 align = MAX (align, TYPE_ALIGN (type));
3267
3268 return align;
3269}
3270
a4f6c312 3271/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3272
3273int
f676971a 3274small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3275 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3276{
38c1f2d7 3277#if TARGET_ELF
5f59ecb7 3278 rtx sym_ref;
7509c759 3279
d9407988 3280 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3281 return 0;
a54d04b7 3282
f607bc57 3283 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3284 return 0;
3285
2aa42e6e
NF
3286 /* Vector and float memory instructions have a limited offset on the
3287 SPE, so using a vector or float variable directly as an operand is
3288 not useful. */
3289 if (TARGET_SPE
3290 && (SPE_VECTOR_MODE (mode) || FLOAT_MODE_P (mode)))
3291 return 0;
3292
88228c4b
MM
3293 if (GET_CODE (op) == SYMBOL_REF)
3294 sym_ref = op;
3295
3296 else if (GET_CODE (op) != CONST
3297 || GET_CODE (XEXP (op, 0)) != PLUS
3298 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3299 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3300 return 0;
3301
88228c4b 3302 else
dbf55e53
MM
3303 {
3304 rtx sum = XEXP (op, 0);
3305 HOST_WIDE_INT summand;
3306
3307 /* We have to be careful here, because it is the referenced address
c4ad648e 3308 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3309 summand = INTVAL (XEXP (sum, 1));
307b599c 3310 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3311 return 0;
dbf55e53
MM
3312
3313 sym_ref = XEXP (sum, 0);
3314 }
88228c4b 3315
20bfcd69 3316 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3317#else
3318 return 0;
3319#endif
7509c759 3320}
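
/* Illustrative example (not part of the original source): under the V.4
   ABI with -msdata, a plain SYMBOL_REF for a small global, or a
   (const (plus (symbol_ref "x") (const_int 4))) form, is accepted as long
   as the symbol is marked small (SYMBOL_REF_SMALL_P) and the addend is
   non-negative and no larger than g_switch_value, so that the referenced
   address stays within 32k of _SDA_BASE_.  */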
46c07df8 3321
3a1f863f 3322/* Return true if either operand is a general purpose register. */
46c07df8 3323
3a1f863f
DE
3324bool
3325gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3326{
3a1f863f
DE
3327 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3328 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3329}
3330
9ebbca7d 3331\f
4d588c14
RH
3332/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3333
f676971a
EC
3334static int
3335constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 3336{
9390387d 3337 switch (GET_CODE (op))
9ebbca7d
GK
3338 {
3339 case SYMBOL_REF:
c4501e62
JJ
3340 if (RS6000_SYMBOL_REF_TLS_P (op))
3341 return 0;
3342 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
3343 {
3344 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3345 {
3346 *have_sym = 1;
3347 return 1;
3348 }
3349 else
3350 return 0;
3351 }
3352 else if (! strcmp (XSTR (op, 0), toc_label_name))
3353 {
3354 *have_toc = 1;
3355 return 1;
3356 }
3357 else
3358 return 0;
9ebbca7d
GK
3359 case PLUS:
3360 case MINUS:
c1f11548
DE
3361 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3362 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 3363 case CONST:
a4f6c312 3364 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 3365 case CONST_INT:
a4f6c312 3366 return 1;
9ebbca7d 3367 default:
a4f6c312 3368 return 0;
9ebbca7d
GK
3369 }
3370}
3371
4d588c14 3372static bool
a2369ed3 3373constant_pool_expr_p (rtx op)
9ebbca7d
GK
3374{
3375 int have_sym = 0;
3376 int have_toc = 0;
3377 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3378}
3379
48d72335 3380bool
a2369ed3 3381toc_relative_expr_p (rtx op)
9ebbca7d 3382{
4d588c14
RH
3383 int have_sym = 0;
3384 int have_toc = 0;
3385 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3386}
3387
4d588c14 3388bool
a2369ed3 3389legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3390{
3391 return (TARGET_TOC
3392 && GET_CODE (x) == PLUS
3393 && GET_CODE (XEXP (x, 0)) == REG
3394 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3395 && constant_pool_expr_p (XEXP (x, 1)));
3396}
3397
d04b6e6e
EB
3398static bool
3399legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3400{
3401 return (DEFAULT_ABI == ABI_V4
3402 && !flag_pic && !TARGET_TOC
3403 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3404 && small_data_operand (x, mode));
3405}
3406
60cdabab
DE
3407/* SPE offset addressing is limited to 5-bits worth of double words. */
3408#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
3409
76d2b81d
DJ
3410bool
3411rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3412{
3413 unsigned HOST_WIDE_INT offset, extra;
3414
3415 if (GET_CODE (x) != PLUS)
3416 return false;
3417 if (GET_CODE (XEXP (x, 0)) != REG)
3418 return false;
3419 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3420 return false;
60cdabab
DE
3421 if (legitimate_constant_pool_address_p (x))
3422 return true;
4d588c14
RH
3423 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3424 return false;
3425
3426 offset = INTVAL (XEXP (x, 1));
3427 extra = 0;
3428 switch (mode)
3429 {
3430 case V16QImode:
3431 case V8HImode:
3432 case V4SFmode:
3433 case V4SImode:
7a4eca66 3434 /* AltiVec vector modes. Only reg+reg addressing is valid and
1a23970d
DE
3435 constant offset zero should not occur due to canonicalization. */
3436 return false;
4d588c14
RH
3437
3438 case V4HImode:
3439 case V2SImode:
3440 case V1DImode:
3441 case V2SFmode:
d42a3bae 3442 /* Paired vector modes. Only reg+reg addressing is valid and
1a23970d 3443 constant offset zero should not occur due to canonicalization. */
d42a3bae 3444 if (TARGET_PAIRED_FLOAT)
1a23970d 3445 return false;
4d588c14
RH
3446 /* SPE vector modes. */
3447 return SPE_CONST_OFFSET_OK (offset);
3448
3449 case DFmode:
7393f7f8 3450 case DDmode:
4d4cbc0e
AH
3451 if (TARGET_E500_DOUBLE)
3452 return SPE_CONST_OFFSET_OK (offset);
3453
4d588c14 3454 case DImode:
54b695e7
AH
3455 /* On e500v2, we may have:
3456
3457 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3458
3459 Which gets addressed with evldd instructions. */
3460 if (TARGET_E500_DOUBLE)
3461 return SPE_CONST_OFFSET_OK (offset);
3462
7393f7f8 3463 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3464 extra = 4;
3465 else if (offset & 3)
3466 return false;
3467 break;
3468
3469 case TFmode:
4d4447b5 3470 case TDmode:
17caeff2
JM
3471 if (TARGET_E500_DOUBLE)
3472 return (SPE_CONST_OFFSET_OK (offset)
3473 && SPE_CONST_OFFSET_OK (offset + 8));
3474
4d588c14 3475 case TImode:
7393f7f8 3476 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3477 extra = 12;
3478 else if (offset & 3)
3479 return false;
3480 else
3481 extra = 8;
3482 break;
3483
3484 default:
3485 break;
3486 }
3487
b1917422
AM
3488 offset += 0x8000;
3489 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3490}
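
/* Worked example (illustrative, not part of the original source): for
   DFmode without TARGET_E500_DOUBLE, the switch above falls through to the
   DImode case and sets extra = 4, so the final test accepts offsets in the
   range [-0x8000, 0x7ffb]: both the offset and offset + 4 must fit in a
   signed 16-bit displacement so that the second word of the double is
   addressable as well.  */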
3491
6fb5fa3c 3492bool
a2369ed3 3493legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3494{
3495 rtx op0, op1;
3496
3497 if (GET_CODE (x) != PLUS)
3498 return false;
850e8d3d 3499
4d588c14
RH
3500 op0 = XEXP (x, 0);
3501 op1 = XEXP (x, 1);
3502
bf00cc0f 3503 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3504 replaced with proper base and index regs. */
3505 if (!strict
3506 && reload_in_progress
3507 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3508 && REG_P (op1))
3509 return true;
3510
3511 return (REG_P (op0) && REG_P (op1)
3512 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3513 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3514 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3515 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3516}
3517
48d72335 3518inline bool
a2369ed3 3519legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3520{
3521 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3522}
3523
48d72335 3524bool
4c81e946
FJ
3525macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3526{
c4ad648e 3527 if (!TARGET_MACHO || !flag_pic
9390387d 3528 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3529 return false;
3530 x = XEXP (x, 0);
4c81e946
FJ
3531
3532 if (GET_CODE (x) != LO_SUM)
3533 return false;
3534 if (GET_CODE (XEXP (x, 0)) != REG)
3535 return false;
3536 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3537 return false;
3538 x = XEXP (x, 1);
3539
3540 return CONSTANT_P (x);
3541}
3542
4d588c14 3543static bool
a2369ed3 3544legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3545{
3546 if (GET_CODE (x) != LO_SUM)
3547 return false;
3548 if (GET_CODE (XEXP (x, 0)) != REG)
3549 return false;
3550 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3551 return false;
54b695e7 3552 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2 3553 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 3554 || mode == DDmode || mode == TDmode
17caeff2 3555 || mode == DImode))
f82f556d 3556 return false;
4d588c14
RH
3557 x = XEXP (x, 1);
3558
8622e235 3559 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3560 {
a29077da 3561 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3562 return false;
3563 if (TARGET_TOC)
3564 return false;
3565 if (GET_MODE_NUNITS (mode) != 1)
3566 return false;
5e5f01b9 3567 if (GET_MODE_BITSIZE (mode) > 64
3c028f65 3568 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
4d4447b5
PB
3569 && !(TARGET_HARD_FLOAT && TARGET_FPRS
3570 && (mode == DFmode || mode == DDmode))))
4d588c14
RH
3571 return false;
3572
3573 return CONSTANT_P (x);
3574 }
3575
3576 return false;
3577}
3578
3579
9ebbca7d
GK
3580/* Try machine-dependent ways of modifying an illegitimate address
3581 to be legitimate. If we find one, return the new, valid address.
3582 This is used from only one place: `memory_address' in explow.c.
3583
a4f6c312
SS
3584 OLDX is the address as it was before break_out_memory_refs was
3585 called. In some cases it is useful to look at this to decide what
3586 needs to be done.
9ebbca7d 3587
a4f6c312 3588 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3589
a4f6c312
SS
3590 It is always safe for this function to do nothing. It exists to
3591 recognize opportunities to optimize the output.
9ebbca7d
GK
3592
3593 On RS/6000, first check for the sum of a register with a constant
3594 integer that is out of range. If so, generate code to add the
3595 constant with the low-order 16 bits masked to the register and force
3596 this result into another register (this can be done with `cau').
3597 Then generate an address of REG+(CONST&0xffff), allowing for the
3598 possibility of bit 16 being a one.
3599
3600 Then check for the sum of a register and something not constant, try to
3601 load the other things into a register and return the sum. */
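
/* Worked example (illustrative, not part of the original source): for an
   address (plus (reg) (const_int 0x18fff)), low_int below becomes
   ((0x8fff ^ 0x8000) - 0x8000) == -0x7001 and high_int becomes
   0x18fff - (-0x7001) == 0x20000, so the address is rewritten as
   (plus (plus (reg) 0x20000) -0x7001); the inner sum is formed with a
   single addis and -0x7001 remains as the 16-bit displacement of the
   memory access.  */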
4d588c14 3602
9ebbca7d 3603rtx
a2369ed3
DJ
3604rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3605 enum machine_mode mode)
0ac081f6 3606{
c4501e62
JJ
3607 if (GET_CODE (x) == SYMBOL_REF)
3608 {
3609 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3610 if (model != 0)
3611 return rs6000_legitimize_tls_address (x, model);
3612 }
3613
f676971a 3614 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3615 && GET_CODE (XEXP (x, 0)) == REG
3616 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb
JM
3617 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
3618 && !(SPE_VECTOR_MODE (mode)
efc05e3c 3619 || ALTIVEC_VECTOR_MODE (mode)
3c1eb9eb
JM
3620 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3621 || mode == DImode))))
f676971a 3622 {
9ebbca7d
GK
3623 HOST_WIDE_INT high_int, low_int;
3624 rtx sum;
a65c591c
DE
3625 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3626 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3627 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3628 GEN_INT (high_int)), 0);
3629 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3630 }
f676971a 3631 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3632 && GET_CODE (XEXP (x, 0)) == REG
3633 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3634 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
3635 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3636 || TARGET_POWERPC64
efc05e3c
PB
3637 || ((mode != DImode && mode != DFmode && mode != DDmode)
3638 || TARGET_E500_DOUBLE))
9ebbca7d 3639 && (TARGET_POWERPC64 || mode != DImode)
efc05e3c
PB
3640 && mode != TImode
3641 && mode != TFmode
3642 && mode != TDmode)
9ebbca7d
GK
3643 {
3644 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3645 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3646 }
0ac081f6
AH
3647 else if (ALTIVEC_VECTOR_MODE (mode))
3648 {
3649 rtx reg;
3650
3651 /* Make sure both operands are registers. */
3652 if (GET_CODE (x) == PLUS)
9f85ed45 3653 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3654 force_reg (Pmode, XEXP (x, 1)));
3655
3656 reg = force_reg (Pmode, x);
3657 return reg;
3658 }
4d4cbc0e 3659 else if (SPE_VECTOR_MODE (mode)
17caeff2 3660 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3661 || mode == DDmode || mode == TDmode
54b695e7 3662 || mode == DImode)))
a3170dc6 3663 {
54b695e7
AH
3664 if (mode == DImode)
3665 return NULL_RTX;
a3170dc6
AH
3666 /* We accept [reg + reg] and [reg + OFFSET]. */
3667
3668 if (GET_CODE (x) == PLUS)
61dd226f
NF
3669 {
3670 rtx op1 = XEXP (x, 0);
3671 rtx op2 = XEXP (x, 1);
3672 rtx y;
3673
3674 op1 = force_reg (Pmode, op1);
3675
3676 if (GET_CODE (op2) != REG
3677 && (GET_CODE (op2) != CONST_INT
3678 || !SPE_CONST_OFFSET_OK (INTVAL (op2))
3679 || (GET_MODE_SIZE (mode) > 8
3680 && !SPE_CONST_OFFSET_OK (INTVAL (op2) + 8))))
3681 op2 = force_reg (Pmode, op2);
3682
3683 /* We can't always do [reg + reg] for these, because [reg +
3684 reg + offset] is not a legitimate addressing mode. */
3685 y = gen_rtx_PLUS (Pmode, op1, op2);
3686
3687 if (GET_MODE_SIZE (mode) > 8 && REG_P (op2))
3688 return force_reg (Pmode, y);
3689 else
3690 return y;
3691 }
a3170dc6
AH
3692
3693 return force_reg (Pmode, x);
3694 }
f1384257
AM
3695 else if (TARGET_ELF
3696 && TARGET_32BIT
3697 && TARGET_NO_TOC
3698 && ! flag_pic
9ebbca7d 3699 && GET_CODE (x) != CONST_INT
f676971a 3700 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3701 && CONSTANT_P (x)
6ac7bf2c
GK
3702 && GET_MODE_NUNITS (mode) == 1
3703 && (GET_MODE_BITSIZE (mode) <= 32
4d4447b5
PB
3704 || ((TARGET_HARD_FLOAT && TARGET_FPRS)
3705 && (mode == DFmode || mode == DDmode))))
9ebbca7d
GK
3706 {
3707 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3708 emit_insn (gen_elf_high (reg, x));
3709 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3710 }
ee890fe2
SS
3711 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3712 && ! flag_pic
ab82a49f
AP
3713#if TARGET_MACHO
3714 && ! MACHO_DYNAMIC_NO_PIC_P
3715#endif
ee890fe2 3716 && GET_CODE (x) != CONST_INT
f676971a 3717 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3718 && CONSTANT_P (x)
4d4447b5
PB
3719 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3720 || (mode != DFmode && mode != DDmode))
f676971a 3721 && mode != DImode
ee890fe2
SS
3722 && mode != TImode)
3723 {
3724 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3725 emit_insn (gen_macho_high (reg, x));
3726 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3727 }
f676971a 3728 else if (TARGET_TOC
4d588c14 3729 && constant_pool_expr_p (x)
a9098fd0 3730 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3731 {
3732 return create_TOC_reference (x);
3733 }
3734 else
3735 return NULL_RTX;
3736}
258bfae2 3737
fdbe66f2 3738/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3739 We need to emit DTP-relative relocations. */
3740
fdbe66f2 3741static void
c973d557
JJ
3742rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3743{
3744 switch (size)
3745 {
3746 case 4:
3747 fputs ("\t.long\t", file);
3748 break;
3749 case 8:
3750 fputs (DOUBLE_INT_ASM_OP, file);
3751 break;
3752 default:
37409796 3753 gcc_unreachable ();
c973d557
JJ
3754 }
3755 output_addr_const (file, x);
3756 fputs ("@dtprel+0x8000", file);
3757}
3758
c4501e62
JJ
3759/* Construct the SYMBOL_REF for the tls_get_addr function. */
3760
3761static GTY(()) rtx rs6000_tls_symbol;
3762static rtx
863d938c 3763rs6000_tls_get_addr (void)
c4501e62
JJ
3764{
3765 if (!rs6000_tls_symbol)
3766 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3767
3768 return rs6000_tls_symbol;
3769}
3770
3771/* Construct the SYMBOL_REF for TLS GOT references. */
3772
3773static GTY(()) rtx rs6000_got_symbol;
3774static rtx
863d938c 3775rs6000_got_sym (void)
c4501e62
JJ
3776{
3777 if (!rs6000_got_symbol)
3778 {
3779 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3780 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3781 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3782 }
c4501e62
JJ
3783
3784 return rs6000_got_symbol;
3785}
3786
3787/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3788 this (thread-local) address. */
3789
3790static rtx
a2369ed3 3791rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3792{
3793 rtx dest, insn;
3794
3795 dest = gen_reg_rtx (Pmode);
3796 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3797 {
3798 rtx tlsreg;
3799
3800 if (TARGET_64BIT)
3801 {
3802 tlsreg = gen_rtx_REG (Pmode, 13);
3803 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3804 }
3805 else
3806 {
3807 tlsreg = gen_rtx_REG (Pmode, 2);
3808 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3809 }
3810 emit_insn (insn);
3811 }
3812 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3813 {
3814 rtx tlsreg, tmp;
3815
3816 tmp = gen_reg_rtx (Pmode);
3817 if (TARGET_64BIT)
3818 {
3819 tlsreg = gen_rtx_REG (Pmode, 13);
3820 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3821 }
3822 else
3823 {
3824 tlsreg = gen_rtx_REG (Pmode, 2);
3825 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3826 }
3827 emit_insn (insn);
3828 if (TARGET_64BIT)
3829 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3830 else
3831 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3832 emit_insn (insn);
3833 }
3834 else
3835 {
3836 rtx r3, got, tga, tmp1, tmp2, eqv;
3837
4fed8f8f
AM
3838 /* We currently use relocations like @got@tlsgd for tls, which
3839 means the linker will handle allocation of tls entries, placing
3840 them in the .got section. So use a pointer to the .got section,
3841 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3842 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3843 if (TARGET_64BIT)
972f427b 3844 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3845 else
3846 {
3847 if (flag_pic == 1)
3848 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3849 else
3850 {
3851 rtx gsym = rs6000_got_sym ();
3852 got = gen_reg_rtx (Pmode);
3853 if (flag_pic == 0)
3854 rs6000_emit_move (got, gsym, Pmode);
3855 else
3856 {
e65a3857 3857 rtx tmp3, mem;
c4501e62
JJ
3858 rtx first, last;
3859
c4501e62
JJ
3860 tmp1 = gen_reg_rtx (Pmode);
3861 tmp2 = gen_reg_rtx (Pmode);
3862 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3863 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3864
e65a3857
DE
3865 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
3866 emit_move_insn (tmp1,
1de43f85 3867 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
3868 emit_move_insn (tmp2, mem);
3869 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3870 last = emit_move_insn (got, tmp3);
bd94cb6e 3871 set_unique_reg_note (last, REG_EQUAL, gsym);
6fb5fa3c 3872 maybe_encapsulate_block (first, last, gsym);
c4501e62
JJ
3873 }
3874 }
3875 }
3876
3877 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3878 {
3879 r3 = gen_rtx_REG (Pmode, 3);
3880 if (TARGET_64BIT)
3881 insn = gen_tls_gd_64 (r3, got, addr);
3882 else
3883 insn = gen_tls_gd_32 (r3, got, addr);
3884 start_sequence ();
3885 emit_insn (insn);
3886 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3887 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3888 insn = emit_call_insn (insn);
3889 CONST_OR_PURE_CALL_P (insn) = 1;
3890 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3891 insn = get_insns ();
3892 end_sequence ();
3893 emit_libcall_block (insn, dest, r3, addr);
3894 }
3895 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3896 {
3897 r3 = gen_rtx_REG (Pmode, 3);
3898 if (TARGET_64BIT)
3899 insn = gen_tls_ld_64 (r3, got);
3900 else
3901 insn = gen_tls_ld_32 (r3, got);
3902 start_sequence ();
3903 emit_insn (insn);
3904 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3905 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3906 insn = emit_call_insn (insn);
3907 CONST_OR_PURE_CALL_P (insn) = 1;
3908 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3909 insn = get_insns ();
3910 end_sequence ();
3911 tmp1 = gen_reg_rtx (Pmode);
3912 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3913 UNSPEC_TLSLD);
3914 emit_libcall_block (insn, tmp1, r3, eqv);
3915 if (rs6000_tls_size == 16)
3916 {
3917 if (TARGET_64BIT)
3918 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3919 else
3920 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3921 }
3922 else if (rs6000_tls_size == 32)
3923 {
3924 tmp2 = gen_reg_rtx (Pmode);
3925 if (TARGET_64BIT)
3926 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3927 else
3928 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3929 emit_insn (insn);
3930 if (TARGET_64BIT)
3931 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3932 else
3933 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3934 }
3935 else
3936 {
3937 tmp2 = gen_reg_rtx (Pmode);
3938 if (TARGET_64BIT)
3939 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3940 else
3941 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3942 emit_insn (insn);
3943 insn = gen_rtx_SET (Pmode, dest,
3944 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3945 }
3946 emit_insn (insn);
3947 }
3948 else
3949 {
a7b376ee 3950 /* IE, or 64-bit offset LE. */
c4501e62
JJ
3951 tmp2 = gen_reg_rtx (Pmode);
3952 if (TARGET_64BIT)
3953 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3954 else
3955 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3956 emit_insn (insn);
3957 if (TARGET_64BIT)
3958 insn = gen_tls_tls_64 (dest, tmp2, addr);
3959 else
3960 insn = gen_tls_tls_32 (dest, tmp2, addr);
3961 emit_insn (insn);
3962 }
3963 }
3964
3965 return dest;
3966}
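
/* Illustrative note (not part of the original source): the cheapest case
   above is local-exec with -mtls-size=16, where the thread-local address is
   formed by a single tls_tprel add of ADDR to the thread pointer (r13 for
   64-bit, r2 for 32-bit).  The global-dynamic and local-dynamic models
   instead materialize a GOT/TOC pointer and call __tls_get_addr.  */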
3967
c4501e62
JJ
3968/* Return 1 if X contains a thread-local symbol. */
3969
3970bool
a2369ed3 3971rs6000_tls_referenced_p (rtx x)
c4501e62 3972{
cd413cab
AP
3973 if (! TARGET_HAVE_TLS)
3974 return false;
3975
c4501e62
JJ
3976 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3977}
3978
3979/* Return 1 if *X is a thread-local symbol. This is the same as
3980 rs6000_tls_symbol_ref except for the type of the unused argument. */
3981
9390387d 3982static int
a2369ed3 3983rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
3984{
3985 return RS6000_SYMBOL_REF_TLS_P (*x);
3986}
3987
24ea750e
DJ
3988/* The convention appears to be to define this wherever it is used.
3989 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3990 is now used here. */
3991#ifndef REG_MODE_OK_FOR_BASE_P
3992#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3993#endif
3994
3995/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3996 replace the input X, or the original X if no replacement is called for.
3997 The output parameter *WIN is 1 if the calling macro should goto WIN,
3998 0 if it should not.
3999
4000 For RS/6000, we wish to handle large displacements off a base
 4001	   register by splitting the addend across an addi/addis pair and the mem insn.
 4002	   This cuts the number of extra insns needed from 3 to 1.
4003
4004 On Darwin, we use this to generate code for floating point constants.
4005 A movsf_low is generated so we wind up with 2 instructions rather than 3.
4006 The Darwin code is inside #if TARGET_MACHO because only then is
4007 machopic_function_base_name() defined. */
4008rtx
f676971a 4009rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
4010 int opnum, int type,
4011 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 4012{
f676971a 4013 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
4014 if (GET_CODE (x) == PLUS
4015 && GET_CODE (XEXP (x, 0)) == PLUS
4016 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4017 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4018 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4019 {
4020 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4021 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4022 opnum, (enum reload_type)type);
24ea750e
DJ
4023 *win = 1;
4024 return x;
4025 }
3deb2758 4026
24ea750e
DJ
4027#if TARGET_MACHO
4028 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
4029 && GET_CODE (x) == LO_SUM
4030 && GET_CODE (XEXP (x, 0)) == PLUS
4031 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
4032 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
4033 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
4034 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
4035 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
4036 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
4037 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
4038 {
4039 /* Result of previous invocation of this function on Darwin
6f317ef3 4040 floating point constant. */
24ea750e 4041 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4042 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4043 opnum, (enum reload_type)type);
24ea750e
DJ
4044 *win = 1;
4045 return x;
4046 }
4047#endif
4937d02d
DE
4048
4049 /* Force ld/std non-word aligned offset into base register by wrapping
4050 in offset 0. */
4051 if (GET_CODE (x) == PLUS
4052 && GET_CODE (XEXP (x, 0)) == REG
4053 && REGNO (XEXP (x, 0)) < 32
4054 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
4055 && GET_CODE (XEXP (x, 1)) == CONST_INT
4056 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 4057 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
4058 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
4059 && TARGET_POWERPC64)
4060 {
4061 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
4062 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4063 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4064 opnum, (enum reload_type) type);
4065 *win = 1;
4066 return x;
4067 }
4068
24ea750e
DJ
4069 if (GET_CODE (x) == PLUS
4070 && GET_CODE (XEXP (x, 0)) == REG
4071 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
4072 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 4073 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 4074 && !SPE_VECTOR_MODE (mode)
17caeff2 4075 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 4076 || mode == DDmode || mode == TDmode
54b695e7 4077 || mode == DImode))
78c875e8 4078 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
4079 {
4080 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
4081 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
4082 HOST_WIDE_INT high
c4ad648e 4083 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
4084
4085 /* Check for 32-bit overflow. */
4086 if (high + low != val)
c4ad648e 4087 {
24ea750e
DJ
4088 *win = 0;
4089 return x;
4090 }
4091
4092 /* Reload the high part into a base reg; leave the low part
c4ad648e 4093 in the mem directly. */
24ea750e
DJ
4094
4095 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
4096 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4097 GEN_INT (high)),
4098 GEN_INT (low));
24ea750e
DJ
4099
4100 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4101 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4102 opnum, (enum reload_type)type);
24ea750e
DJ
4103 *win = 1;
4104 return x;
4105 }
4937d02d 4106
24ea750e 4107 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 4108 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 4109 && !SPE_VECTOR_MODE (mode)
8308679f
DE
4110#if TARGET_MACHO
4111 && DEFAULT_ABI == ABI_DARWIN
a29077da 4112 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
4113#else
4114 && DEFAULT_ABI == ABI_V4
4115 && !flag_pic
4116#endif
7393f7f8 4117 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
4d4447b5 4118 The same goes for DImode without 64-bit gprs and DFmode and DDmode
7b5d92b2 4119 without fprs. */
0d8c1c97 4120 && mode != TFmode
7393f7f8 4121 && mode != TDmode
7b5d92b2 4122 && (mode != DImode || TARGET_POWERPC64)
4d4447b5 4123 && ((mode != DFmode && mode != DDmode) || TARGET_POWERPC64
7b5d92b2 4124 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
24ea750e 4125 {
8308679f 4126#if TARGET_MACHO
a29077da
GK
4127 if (flag_pic)
4128 {
4129 rtx offset = gen_rtx_CONST (Pmode,
4130 gen_rtx_MINUS (Pmode, x,
11abc112 4131 machopic_function_base_sym ()));
a29077da
GK
4132 x = gen_rtx_LO_SUM (GET_MODE (x),
4133 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4134 gen_rtx_HIGH (Pmode, offset)), offset);
4135 }
4136 else
8308679f 4137#endif
a29077da 4138 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 4139 gen_rtx_HIGH (Pmode, x), x);
a29077da 4140
24ea750e 4141 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
4142 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4143 opnum, (enum reload_type)type);
24ea750e
DJ
4144 *win = 1;
4145 return x;
4146 }
4937d02d 4147
dec1f3aa
DE
4148 /* Reload an offset address wrapped by an AND that represents the
4149 masking of the lower bits. Strip the outer AND and let reload
4150 convert the offset address into an indirect address. */
4151 if (TARGET_ALTIVEC
4152 && ALTIVEC_VECTOR_MODE (mode)
4153 && GET_CODE (x) == AND
4154 && GET_CODE (XEXP (x, 0)) == PLUS
4155 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4156 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4157 && GET_CODE (XEXP (x, 1)) == CONST_INT
4158 && INTVAL (XEXP (x, 1)) == -16)
4159 {
4160 x = XEXP (x, 0);
4161 *win = 1;
4162 return x;
4163 }
4164
24ea750e 4165 if (TARGET_TOC
4d588c14 4166 && constant_pool_expr_p (x)
c1f11548 4167 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 4168 {
194c524a 4169 x = create_TOC_reference (x);
24ea750e
DJ
4170 *win = 1;
4171 return x;
4172 }
4173 *win = 0;
4174 return x;
f676971a 4175}
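
/* Worked example (illustrative, not part of the original source): a 64-bit
   ld/std needs a word-aligned displacement, so an address such as
   (plus (reg) (const_int 6)) for a DImode access is rewritten above as
   (plus (plus (reg) (const_int 6)) (const_int 0)); reload then moves the
   inner sum into a base register and the access uses offset 0.  */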
24ea750e 4176
258bfae2
FS
4177/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
4178 that is a valid memory address for an instruction.
4179 The MODE argument is the machine mode for the MEM expression
4180 that wants to use this address.
4181
 4182	   On the RS/6000, there are four valid addresses: a SYMBOL_REF that
4183 refers to a constant pool entry of an address (or the sum of it
4184 plus a constant), a short (16-bit signed) constant plus a register,
4185 the sum of two registers, or a register indirect, possibly with an
4d4447b5
PB
4186 auto-increment. For DFmode, DDmode and DImode with a constant plus
4187 register, we must ensure that both words are addressable or PowerPC64
4188 with offset word aligned.
258bfae2 4189
4d4447b5 4190 For modes spanning multiple registers (DFmode and DDmode in 32-bit GPRs,
7393f7f8
BE
4191 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4192 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
4193 during assembly output. */
4194int
a2369ed3 4195rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 4196{
850e8d3d
DN
4197 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4198 if (TARGET_ALTIVEC
4199 && ALTIVEC_VECTOR_MODE (mode)
4200 && GET_CODE (x) == AND
4201 && GET_CODE (XEXP (x, 1)) == CONST_INT
4202 && INTVAL (XEXP (x, 1)) == -16)
4203 x = XEXP (x, 0);
4204
c4501e62
JJ
4205 if (RS6000_SYMBOL_REF_TLS_P (x))
4206 return 0;
4d588c14 4207 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
4208 return 1;
4209 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4210 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4211 && !SPE_VECTOR_MODE (mode)
429ec7dc 4212 && mode != TFmode
7393f7f8 4213 && mode != TDmode
54b695e7 4214 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4215 && !(TARGET_E500_DOUBLE
4216 && (mode == DFmode || mode == DDmode || mode == DImode))
258bfae2 4217 && TARGET_UPDATE
4d588c14 4218 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4219 return 1;
d04b6e6e 4220 if (legitimate_small_data_p (mode, x))
258bfae2 4221 return 1;
4d588c14 4222 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
4223 return 1;
4224 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4225 if (! reg_ok_strict
4226 && GET_CODE (x) == PLUS
4227 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4228 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4229 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
4230 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4231 return 1;
76d2b81d 4232 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4233 return 1;
4234 if (mode != TImode
76d2b81d 4235 && mode != TFmode
7393f7f8 4236 && mode != TDmode
a3170dc6
AH
4237 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4238 || TARGET_POWERPC64
4d4447b5 4239 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
258bfae2 4240 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 4241 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4242 return 1;
6fb5fa3c
DB
4243 if (GET_CODE (x) == PRE_MODIFY
4244 && mode != TImode
4245 && mode != TFmode
4246 && mode != TDmode
4247 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4248 || TARGET_POWERPC64
4d4447b5 4249 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
6fb5fa3c
DB
4250 && (TARGET_POWERPC64 || mode != DImode)
4251 && !ALTIVEC_VECTOR_MODE (mode)
4252 && !SPE_VECTOR_MODE (mode)
4253 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4254 && !(TARGET_E500_DOUBLE
4255 && (mode == DFmode || mode == DDmode || mode == DImode))
6fb5fa3c
DB
4256 && TARGET_UPDATE
4257 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4258 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
4259 || legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
4260 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4261 return 1;
4d588c14 4262 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4263 return 1;
4264 return 0;
4265}
4d588c14
RH
4266
4267/* Go to LABEL if ADDR (a legitimate address expression)
4268 has an effect that depends on the machine mode it is used for.
4269
4270 On the RS/6000 this is true of all integral offsets (since AltiVec
4271 modes don't allow them) or is a pre-increment or decrement.
4272
4273 ??? Except that due to conceptual problems in offsettable_address_p
4274 we can't really report the problems of integral offsets. So leave
f676971a 4275 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4276 sub-words of a TFmode operand, which is what we had before. */
4277
4278bool
a2369ed3 4279rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4280{
4281 switch (GET_CODE (addr))
4282 {
4283 case PLUS:
4284 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4285 {
4286 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4287 return val + 12 + 0x8000 >= 0x10000;
4288 }
4289 break;
4290
4291 case LO_SUM:
4292 return true;
4293
6fb5fa3c
DB
4294 case PRE_INC:
4295 case PRE_DEC:
4296 case PRE_MODIFY:
4297 return TARGET_UPDATE;
4d588c14
RH
4298
4299 default:
4300 break;
4301 }
4302
4303 return false;
4304}
d8ecbcdb 4305
d04b6e6e
EB
4306/* More elaborate version of recog's offsettable_memref_p predicate
4307 that works around the ??? note of rs6000_mode_dependent_address.
4308 In particular it accepts
4309
4310 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4311
4312 in 32-bit mode, that the recog predicate rejects. */
4313
4314bool
4315rs6000_offsettable_memref_p (rtx op)
4316{
4317 if (!MEM_P (op))
4318 return false;
4319
4320 /* First mimic offsettable_memref_p. */
4321 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4322 return true;
4323
4324 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4325 the latter predicate knows nothing about the mode of the memory
4326 reference and, therefore, assumes that it is the largest supported
4327 mode (TFmode). As a consequence, legitimate offsettable memory
4328 references are rejected. rs6000_legitimate_offset_address_p contains
4329 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4330 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4331}
4332
d8ecbcdb
AH
4333/* Return number of consecutive hard regs needed starting at reg REGNO
4334 to hold something of mode MODE.
4335 This is ordinarily the length in words of a value of mode MODE
4336 but can be less for certain modes in special long registers.
4337
4338 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4339 scalar instructions. The upper 32 bits are only available to the
4340 SIMD instructions.
4341
4342 POWER and PowerPC GPRs hold 32 bits worth;
 4343	   PowerPC64 GPRs and FPRs hold 64 bits worth.  */
4344
4345int
4346rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4347{
4348 if (FP_REGNO_P (regno))
4349 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4350
4351 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4352 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4353
4354 if (ALTIVEC_REGNO_P (regno))
4355 return
4356 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4357
8521c414
JM
4358 /* The value returned for SCmode in the E500 double case is 2 for
4359 ABI compatibility; storing an SCmode value in a single register
4360 would require function_arg and rs6000_spe_function_arg to handle
4361 SCmode so as to pass the value correctly in a pair of
4362 registers. */
4363 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
4364 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4365
d8ecbcdb
AH
4366 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4367}
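
/* Illustrative examples (not part of the original source) of the
   computation above:

     DFmode   (8 bytes) in an FPR             -> 1 register
     DFmode   (8 bytes) in 32-bit GPRs        -> 2 registers
     TFmode  (16 bytes) in FPRs               -> 2 registers
     V4SImode (16 bytes) in an AltiVec reg    -> 1 register  */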
2aa4498c
AH
4368
4369/* Change register usage conditional on target flags. */
4370void
4371rs6000_conditional_register_usage (void)
4372{
4373 int i;
4374
4375 /* Set MQ register fixed (already call_used) if not POWER
4376 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4377 be allocated. */
4378 if (! TARGET_POWER)
4379 fixed_regs[64] = 1;
4380
7c9ac5c0 4381 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4382 if (TARGET_64BIT)
4383 fixed_regs[13] = call_used_regs[13]
4384 = call_really_used_regs[13] = 1;
4385
4386 /* Conditionally disable FPRs. */
4387 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4388 for (i = 32; i < 64; i++)
4389 fixed_regs[i] = call_used_regs[i]
c4ad648e 4390 = call_really_used_regs[i] = 1;
2aa4498c 4391
7c9ac5c0
PH
4392 /* The TOC register is not killed across calls in a way that is
4393 visible to the compiler. */
4394 if (DEFAULT_ABI == ABI_AIX)
4395 call_really_used_regs[2] = 0;
4396
2aa4498c
AH
4397 if (DEFAULT_ABI == ABI_V4
4398 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4399 && flag_pic == 2)
4400 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4401
4402 if (DEFAULT_ABI == ABI_V4
4403 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4404 && flag_pic == 1)
4405 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4406 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4407 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4408
4409 if (DEFAULT_ABI == ABI_DARWIN
4410 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4411 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4412 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4413 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4414
b4db40bf
JJ
4415 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4416 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4417 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4418
2aa4498c
AH
4419 if (TARGET_SPE)
4420 {
4421 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4422 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4423 registers in prologues and epilogues. We no longer use r14
4424 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4425 pool for link-compatibility with older versions of GCC. Once
4426 "old" code has died out, we can return r14 to the allocation
4427 pool. */
4428 fixed_regs[14]
4429 = call_used_regs[14]
4430 = call_really_used_regs[14] = 1;
2aa4498c
AH
4431 }
4432
0db747be 4433 if (!TARGET_ALTIVEC)
2aa4498c
AH
4434 {
4435 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4436 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4437 call_really_used_regs[VRSAVE_REGNO] = 1;
4438 }
4439
0db747be
DE
4440 if (TARGET_ALTIVEC)
4441 global_regs[VSCR_REGNO] = 1;
4442
2aa4498c 4443 if (TARGET_ALTIVEC_ABI)
0db747be
DE
4444 {
4445 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4446 call_used_regs[i] = call_really_used_regs[i] = 1;
4447
4448 /* AIX reserves VR20:31 in non-extended ABI mode. */
4449 if (TARGET_XCOFF)
4450 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
4451 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4452 }
2aa4498c 4453}
fb4d4348 4454\f
a4f6c312
SS
4455/* Try to output insns to set TARGET equal to the constant C if it can
4456 be done in less than N insns. Do all computations in MODE.
4457 Returns the place where the output has been placed if it can be
4458 done and the insns have been emitted. If it would take more than N
 4459	   insns, zero is returned and no insns are emitted.  */
2bfcf297
DB
4460
4461rtx
f676971a 4462rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4463 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4464{
af8cb5c5 4465 rtx result, insn, set;
2bfcf297
DB
4466 HOST_WIDE_INT c0, c1;
4467
37409796 4468 switch (mode)
2bfcf297 4469 {
37409796
NS
4470 case QImode:
4471 case HImode:
2bfcf297 4472 if (dest == NULL)
c4ad648e 4473 dest = gen_reg_rtx (mode);
2bfcf297
DB
4474 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4475 return dest;
bb8df8a6 4476
37409796 4477 case SImode:
b3a13419 4478 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4479
d448860e 4480 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4481 GEN_INT (INTVAL (source)
4482 & (~ (HOST_WIDE_INT) 0xffff))));
4483 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4484 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4485 GEN_INT (INTVAL (source) & 0xffff))));
4486 result = dest;
37409796
NS
4487 break;
4488
4489 case DImode:
4490 switch (GET_CODE (source))
af8cb5c5 4491 {
37409796 4492 case CONST_INT:
af8cb5c5
DE
4493 c0 = INTVAL (source);
4494 c1 = -(c0 < 0);
37409796 4495 break;
bb8df8a6 4496
37409796 4497 case CONST_DOUBLE:
2bfcf297 4498#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4499 c0 = CONST_DOUBLE_LOW (source);
4500 c1 = -(c0 < 0);
2bfcf297 4501#else
af8cb5c5
DE
4502 c0 = CONST_DOUBLE_LOW (source);
4503 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4504#endif
37409796
NS
4505 break;
4506
4507 default:
4508 gcc_unreachable ();
af8cb5c5 4509 }
af8cb5c5
DE
4510
4511 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4512 break;
4513
4514 default:
4515 gcc_unreachable ();
2bfcf297 4516 }
2bfcf297 4517
af8cb5c5
DE
4518 insn = get_last_insn ();
4519 set = single_set (insn);
4520 if (! CONSTANT_P (SET_SRC (set)))
4521 set_unique_reg_note (insn, REG_EQUAL, source);
4522
4523 return result;
2bfcf297
DB
4524}
4525
4526/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4527 fall back to a straight forward decomposition. We do this to avoid
4528 exponential run times encountered when looking for longer sequences
4529 with rs6000_emit_set_const. */
4530static rtx
a2369ed3 4531rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4532{
4533 if (!TARGET_POWERPC64)
4534 {
4535 rtx operand1, operand2;
4536
4537 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4538 DImode);
d448860e 4539 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4540 DImode);
4541 emit_move_insn (operand1, GEN_INT (c1));
4542 emit_move_insn (operand2, GEN_INT (c2));
4543 }
4544 else
4545 {
bc06712d 4546 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4547
bc06712d 4548 ud1 = c1 & 0xffff;
f921c9c9 4549 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4550#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4551 c2 = c1 >> 32;
2bfcf297 4552#endif
bc06712d 4553 ud3 = c2 & 0xffff;
f921c9c9 4554 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4555
f676971a 4556 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4557 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4558 {
bc06712d 4559 if (ud1 & 0x8000)
b78d48dd 4560 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4561 else
4562 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4563 }
2bfcf297 4564
f676971a 4565 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4566 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4567 {
bc06712d 4568 if (ud2 & 0x8000)
f676971a 4569 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4570 - 0x80000000));
252b88f7 4571 else
bc06712d
TR
4572 emit_move_insn (dest, GEN_INT (ud2 << 16));
4573 if (ud1 != 0)
d448860e
JH
4574 emit_move_insn (copy_rtx (dest),
4575 gen_rtx_IOR (DImode, copy_rtx (dest),
4576 GEN_INT (ud1)));
252b88f7 4577 }
f676971a 4578 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4579 || (ud4 == 0 && ! (ud3 & 0x8000)))
4580 {
4581 if (ud3 & 0x8000)
f676971a 4582 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4583 - 0x80000000));
4584 else
4585 emit_move_insn (dest, GEN_INT (ud3 << 16));
4586
4587 if (ud2 != 0)
d448860e
JH
4588 emit_move_insn (copy_rtx (dest),
4589 gen_rtx_IOR (DImode, copy_rtx (dest),
4590 GEN_INT (ud2)));
4591 emit_move_insn (copy_rtx (dest),
4592 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4593 GEN_INT (16)));
bc06712d 4594 if (ud1 != 0)
d448860e
JH
4595 emit_move_insn (copy_rtx (dest),
4596 gen_rtx_IOR (DImode, copy_rtx (dest),
4597 GEN_INT (ud1)));
bc06712d 4598 }
f676971a 4599 else
bc06712d
TR
4600 {
4601 if (ud4 & 0x8000)
f676971a 4602 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4603 - 0x80000000));
4604 else
4605 emit_move_insn (dest, GEN_INT (ud4 << 16));
4606
4607 if (ud3 != 0)
d448860e
JH
4608 emit_move_insn (copy_rtx (dest),
4609 gen_rtx_IOR (DImode, copy_rtx (dest),
4610 GEN_INT (ud3)));
2bfcf297 4611
d448860e
JH
4612 emit_move_insn (copy_rtx (dest),
4613 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4614 GEN_INT (32)));
bc06712d 4615 if (ud2 != 0)
d448860e
JH
4616 emit_move_insn (copy_rtx (dest),
4617 gen_rtx_IOR (DImode, copy_rtx (dest),
4618 GEN_INT (ud2 << 16)));
bc06712d 4619 if (ud1 != 0)
d448860e
JH
4620 emit_move_insn (copy_rtx (dest),
4621 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4622 }
4623 }
2bfcf297
DB
4624 return dest;
4625}
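
/* Worked example (illustrative, not part of the original source): on
   TARGET_POWERPC64, c1 == 0x123456789abcdef0 is split into ud1 == 0xdef0,
   ud2 == 0x9abc, ud3 == 0x5678, ud4 == 0x1234 and built by the final else
   arm above as

     dest  = 0x1234 << 16          (lis)
     dest |= 0x5678                (ori)
     dest <<= 32                   (sldi)
     dest |= 0x9abc << 16          (oris)
     dest |= 0xdef0                (ori)

   i.e. a five-instruction sequence.  */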
4626
76d2b81d 4627/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4628 in cases where it won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4629
4630static void
4631rs6000_eliminate_indexed_memrefs (rtx operands[2])
4632{
4633 if (GET_CODE (operands[0]) == MEM
4634 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4635 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4636 && ! reload_in_progress)
4637 operands[0]
4638 = replace_equiv_address (operands[0],
4639 copy_addr_to_reg (XEXP (operands[0], 0)));
4640
4641 if (GET_CODE (operands[1]) == MEM
4642 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4643 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4644 && ! reload_in_progress)
4645 operands[1]
4646 = replace_equiv_address (operands[1],
4647 copy_addr_to_reg (XEXP (operands[1], 0)));
4648}
4649
fb4d4348
GK
4650/* Emit a move from SOURCE to DEST in mode MODE. */
4651void
a2369ed3 4652rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4653{
4654 rtx operands[2];
4655 operands[0] = dest;
4656 operands[1] = source;
f676971a 4657
fb4d4348
GK
4658 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4659 if (GET_CODE (operands[1]) == CONST_DOUBLE
4660 && ! FLOAT_MODE_P (mode)
4661 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4662 {
4663 /* FIXME. This should never happen. */
4664 /* Since it seems that it does, do the safe thing and convert
4665 to a CONST_INT. */
2496c7bd 4666 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4667 }
37409796
NS
4668 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4669 || FLOAT_MODE_P (mode)
4670 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4671 || CONST_DOUBLE_LOW (operands[1]) < 0)
4672 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4673 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4674
c9e8cb32
DD
4675 /* Check if GCC is setting up a block move that will end up using FP
4676 registers as temporaries. We must make sure this is acceptable. */
4677 if (GET_CODE (operands[0]) == MEM
4678 && GET_CODE (operands[1]) == MEM
4679 && mode == DImode
41543739
GK
4680 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4681 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4682 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4683 ? 32 : MEM_ALIGN (operands[0])))
4684 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4685 ? 32
41543739
GK
4686 : MEM_ALIGN (operands[1]))))
4687 && ! MEM_VOLATILE_P (operands [0])
4688 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4689 {
41543739
GK
4690 emit_move_insn (adjust_address (operands[0], SImode, 0),
4691 adjust_address (operands[1], SImode, 0));
d448860e
JH
4692 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4693 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4694 return;
4695 }
630d42a0 4696
b3a13419 4697 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4698 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4699 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4700
a3170dc6
AH
4701 if (mode == SFmode && ! TARGET_POWERPC
4702 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 4703 && GET_CODE (operands[0]) == MEM)
fb4d4348 4704 {
ffc14f31
GK
4705 int regnum;
4706
4707 if (reload_in_progress || reload_completed)
4708 regnum = true_regnum (operands[1]);
4709 else if (GET_CODE (operands[1]) == REG)
4710 regnum = REGNO (operands[1]);
4711 else
4712 regnum = -1;
f676971a 4713
fb4d4348
GK
4714 /* If operands[1] is a register, on POWER it may have
4715 double-precision data in it, so truncate it to single
4716 precision. */
4717 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4718 {
4719 rtx newreg;
b3a13419 4720 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4721 : gen_reg_rtx (mode));
fb4d4348
GK
4722 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4723 operands[1] = newreg;
4724 }
4725 }
4726
c4501e62
JJ
4727 /* Recognize the case where operand[1] is a reference to thread-local
4728 data and load its address to a register. */
84f52ebd 4729 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4730 {
84f52ebd
RH
4731 enum tls_model model;
4732 rtx tmp = operands[1];
4733 rtx addend = NULL;
4734
4735 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4736 {
4737 addend = XEXP (XEXP (tmp, 0), 1);
4738 tmp = XEXP (XEXP (tmp, 0), 0);
4739 }
4740
4741 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4742 model = SYMBOL_REF_TLS_MODEL (tmp);
4743 gcc_assert (model != 0);
4744
4745 tmp = rs6000_legitimize_tls_address (tmp, model);
4746 if (addend)
4747 {
4748 tmp = gen_rtx_PLUS (mode, tmp, addend);
4749 tmp = force_operand (tmp, operands[0]);
4750 }
4751 operands[1] = tmp;
c4501e62
JJ
4752 }
4753
8f4e6caf
RH
4754 /* Handle the case where reload calls us with an invalid address. */
4755 if (reload_in_progress && mode == Pmode
69ef87e2 4756 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4757 || ! nonimmediate_operand (operands[0], mode)))
4758 goto emit_set;
4759
a9baceb1
GK
4760 /* 128-bit constant floating-point values on Darwin should really be
4761 loaded as two parts. */
8521c414 4762 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
4763 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4764 {
4765 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4766 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
4767 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
4768 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
4769 simplify_gen_subreg (imode, operands[1], mode, 0),
4770 imode);
4771 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
4772 GET_MODE_SIZE (imode)),
4773 simplify_gen_subreg (imode, operands[1], mode,
4774 GET_MODE_SIZE (imode)),
4775 imode);
a9baceb1
GK
4776 return;
4777 }
4778
e41b2a33
PB
4779 if (reload_in_progress && cfun->machine->sdmode_stack_slot != NULL_RTX)
4780 cfun->machine->sdmode_stack_slot =
4781 eliminate_regs (cfun->machine->sdmode_stack_slot, VOIDmode, NULL_RTX);
4782
4783 if (reload_in_progress
4784 && mode == SDmode
4785 && MEM_P (operands[0])
4786 && rtx_equal_p (operands[0], cfun->machine->sdmode_stack_slot)
4787 && REG_P (operands[1]))
4788 {
4789 if (FP_REGNO_P (REGNO (operands[1])))
4790 {
4791 rtx mem = adjust_address_nv (operands[0], DDmode, 0);
4792 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4793 emit_insn (gen_movsd_store (mem, operands[1]));
4794 }
4795 else if (INT_REGNO_P (REGNO (operands[1])))
4796 {
4797 rtx mem = adjust_address_nv (operands[0], mode, 4);
4798 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4799 emit_insn (gen_movsd_hardfloat (mem, operands[1]));
4800 }
4801 else
4802 gcc_unreachable ();
4803 return;
4804 }
4805 if (reload_in_progress
4806 && mode == SDmode
4807 && REG_P (operands[0])
4808 && MEM_P (operands[1])
4809 && rtx_equal_p (operands[1], cfun->machine->sdmode_stack_slot))
4810 {
4811 if (FP_REGNO_P (REGNO (operands[0])))
4812 {
4813 rtx mem = adjust_address_nv (operands[1], DDmode, 0);
4814 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4815 emit_insn (gen_movsd_load (operands[0], mem));
4816 }
4817 else if (INT_REGNO_P (REGNO (operands[0])))
4818 {
4819 rtx mem = adjust_address_nv (operands[1], mode, 4);
4820 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4821 emit_insn (gen_movsd_hardfloat (operands[0], mem));
4822 }
4823 else
4824 gcc_unreachable ();
4825 return;
4826 }
4827
fb4d4348
GK
4828 /* FIXME: In the long term, this switch statement should go away
4829 and be replaced by a sequence of tests based on things like
4830 mode == Pmode. */
4831 switch (mode)
4832 {
4833 case HImode:
4834 case QImode:
4835 if (CONSTANT_P (operands[1])
4836 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 4837 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
4838 break;
4839
06f4e019 4840 case TFmode:
7393f7f8 4841 case TDmode:
76d2b81d
DJ
4842 rs6000_eliminate_indexed_memrefs (operands);
4843 /* fall through */
4844
fb4d4348 4845 case DFmode:
7393f7f8 4846 case DDmode:
fb4d4348 4847 case SFmode:
e41b2a33 4848 case SDmode:
f676971a 4849 if (CONSTANT_P (operands[1])
fb4d4348 4850 && ! easy_fp_constant (operands[1], mode))
a9098fd0 4851 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4852 break;
f676971a 4853
0ac081f6
AH
4854 case V16QImode:
4855 case V8HImode:
4856 case V4SFmode:
4857 case V4SImode:
a3170dc6
AH
4858 case V4HImode:
4859 case V2SFmode:
4860 case V2SImode:
00a892b8 4861 case V1DImode:
69ef87e2 4862 if (CONSTANT_P (operands[1])
d744e06e 4863 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
4864 operands[1] = force_const_mem (mode, operands[1]);
4865 break;
f676971a 4866
fb4d4348 4867 case SImode:
a9098fd0 4868 case DImode:
fb4d4348
GK
4869 /* Use default pattern for address of ELF small data. */
4870 if (TARGET_ELF
a9098fd0 4871 && mode == Pmode
f607bc57 4872 && DEFAULT_ABI == ABI_V4
f676971a 4873 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
4874 || GET_CODE (operands[1]) == CONST)
4875 && small_data_operand (operands[1], mode))
fb4d4348
GK
4876 {
4877 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4878 return;
4879 }
4880
f607bc57 4881 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
4882 && mode == Pmode && mode == SImode
4883 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
4884 {
4885 emit_insn (gen_movsi_got (operands[0], operands[1]));
4886 return;
4887 }
4888
ee890fe2 4889 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
4890 && TARGET_NO_TOC
4891 && ! flag_pic
a9098fd0 4892 && mode == Pmode
fb4d4348
GK
4893 && CONSTANT_P (operands[1])
4894 && GET_CODE (operands[1]) != HIGH
4895 && GET_CODE (operands[1]) != CONST_INT)
4896 {
b3a13419
ILT
4897 rtx target = (!can_create_pseudo_p ()
4898 ? operands[0]
4899 : gen_reg_rtx (mode));
fb4d4348
GK
4900
4901 /* If this is a function address on -mcall-aixdesc,
4902 convert it to the address of the descriptor. */
4903 if (DEFAULT_ABI == ABI_AIX
4904 && GET_CODE (operands[1]) == SYMBOL_REF
4905 && XSTR (operands[1], 0)[0] == '.')
4906 {
4907 const char *name = XSTR (operands[1], 0);
4908 rtx new_ref;
4909 while (*name == '.')
4910 name++;
4911 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4912 CONSTANT_POOL_ADDRESS_P (new_ref)
4913 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 4914 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 4915 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 4916 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
4917 operands[1] = new_ref;
4918 }
7509c759 4919
ee890fe2
SS
4920 if (DEFAULT_ABI == ABI_DARWIN)
4921 {
ab82a49f
AP
4922#if TARGET_MACHO
4923 if (MACHO_DYNAMIC_NO_PIC_P)
4924 {
4925 /* Take care of any required data indirection. */
4926 operands[1] = rs6000_machopic_legitimize_pic_address (
4927 operands[1], mode, operands[0]);
4928 if (operands[0] != operands[1])
4929 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 4930 operands[0], operands[1]));
ab82a49f
AP
4931 return;
4932 }
4933#endif
b8a55285
AP
4934 emit_insn (gen_macho_high (target, operands[1]));
4935 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
4936 return;
4937 }
4938
fb4d4348
GK
4939 emit_insn (gen_elf_high (target, operands[1]));
4940 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4941 return;
4942 }
4943
a9098fd0
GK
4944 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4945 and we have put it in the TOC, we just need to make a TOC-relative
4946 reference to it. */
4947 if (TARGET_TOC
4948 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 4949 && constant_pool_expr_p (operands[1])
a9098fd0
GK
4950 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4951 get_pool_mode (operands[1])))
fb4d4348 4952 {
a9098fd0 4953 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 4954 }
a9098fd0
GK
4955 else if (mode == Pmode
4956 && CONSTANT_P (operands[1])
38886f37
AO
4957 && ((GET_CODE (operands[1]) != CONST_INT
4958 && ! easy_fp_constant (operands[1], mode))
4959 || (GET_CODE (operands[1]) == CONST_INT
4960 && num_insns_constant (operands[1], mode) > 2)
4961 || (GET_CODE (operands[0]) == REG
4962 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 4963 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
4964 && ! legitimate_constant_pool_address_p (operands[1])
4965 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
4966 {
4967 /* Emit a USE operation so that the constant isn't deleted if
4968 expensive optimizations are turned on because nobody
4969 references it. This should only be done for operands that
4970 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4971 This should not be done for operands that contain LABEL_REFs.
4972 For now, we just handle the obvious case. */
4973 if (GET_CODE (operands[1]) != LABEL_REF)
4974 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4975
c859cda6 4976#if TARGET_MACHO
ee890fe2 4977 /* Darwin uses a special PIC legitimizer. */
ab82a49f 4978 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 4979 {
ee890fe2
SS
4980 operands[1] =
4981 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
4982 operands[0]);
4983 if (operands[0] != operands[1])
4984 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
4985 return;
4986 }
c859cda6 4987#endif
ee890fe2 4988
fb4d4348
GK
4989 /* If we are to limit the number of things we put in the TOC and
4990 this is a symbol plus a constant we can add in one insn,
4991 just put the symbol in the TOC and add the constant. Don't do
4992 this if reload is in progress. */
4993 if (GET_CODE (operands[1]) == CONST
4994 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4995 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 4996 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
4997 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4998 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4999 && ! side_effects_p (operands[0]))
5000 {
a4f6c312
SS
5001 rtx sym =
5002 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
5003 rtx other = XEXP (XEXP (operands[1], 0), 1);
5004
a9098fd0
GK
5005 sym = force_reg (mode, sym);
5006 if (mode == SImode)
5007 emit_insn (gen_addsi3 (operands[0], sym, other));
5008 else
5009 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
5010 return;
5011 }
5012
a9098fd0 5013 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5014
f676971a 5015 if (TARGET_TOC
4d588c14 5016 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
5017 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
5018 get_pool_constant (XEXP (operands[1], 0)),
5019 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 5020 {
ba4828e0 5021 operands[1]
542a8afa 5022 = gen_const_mem (mode,
c4ad648e 5023 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 5024 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 5025 }
fb4d4348
GK
5026 }
5027 break;
a9098fd0 5028
fb4d4348 5029 case TImode:
76d2b81d
DJ
5030 rs6000_eliminate_indexed_memrefs (operands);
5031
27dc0551
DE
5032 if (TARGET_POWER)
5033 {
5034 emit_insn (gen_rtx_PARALLEL (VOIDmode,
5035 gen_rtvec (2,
5036 gen_rtx_SET (VOIDmode,
5037 operands[0], operands[1]),
5038 gen_rtx_CLOBBER (VOIDmode,
5039 gen_rtx_SCRATCH (SImode)))));
5040 return;
5041 }
fb4d4348
GK
5042 break;
5043
5044 default:
37409796 5045 gcc_unreachable ();
fb4d4348
GK
5046 }
5047
a9098fd0
GK
5048 /* Above, we may have called force_const_mem which may have returned
5049 an invalid address. If we can, fix this up; otherwise, reload will
5050 have to deal with it. */
8f4e6caf
RH
5051 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
5052 operands[1] = validize_mem (operands[1]);
a9098fd0 5053
8f4e6caf 5054 emit_set:
fb4d4348
GK
5055 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5056}
4697a36c 5057\f
2858f73a
GK
5058/* Nonzero if we can use a floating-point register to pass this arg. */
5059#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 5060 (SCALAR_FLOAT_MODE_P (MODE) \
2858f73a
GK
5061 && (CUM)->fregno <= FP_ARG_MAX_REG \
5062 && TARGET_HARD_FLOAT && TARGET_FPRS)
5063
5064/* Nonzero if we can use an AltiVec register to pass this arg. */
5065#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
5066 (ALTIVEC_VECTOR_MODE (MODE) \
5067 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
5068 && TARGET_ALTIVEC_ABI \
83953138 5069 && (NAMED))
2858f73a 5070
c6e8c921
GK
5071/* Return a nonzero value to say to return the function value in
5072 memory, just as large structures are always returned. TYPE will be
5073 the data type of the value, and FNTYPE will be the type of the
5074 function doing the returning, or @code{NULL} for libcalls.
5075
5076 The AIX ABI for the RS/6000 specifies that all structures are
5077 returned in memory. The Darwin ABI does the same. The SVR4 ABI
5078 specifies that structures <= 8 bytes are returned in r3/r4, but a
5079 draft put them in memory, and GCC used to implement the draft
df01da37 5080 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
5081 controls this instead of DEFAULT_ABI; V.4 targets needing backward
5082 compatibility can change DRAFT_V4_STRUCT_RET to override the
5083 default, and -m switches get the final word. See
5084 rs6000_override_options for more details.
5085
5086 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
5087 long double support is enabled. These values are returned in memory.
5088
5089 int_size_in_bytes returns -1 for variable size objects, which go in
5090 memory always. The cast to unsigned makes -1 > 8. */
5091
5092static bool
586de218 5093rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 5094{
594a51fe
SS
5095 /* In the darwin64 ABI, try to use registers for larger structs
5096 if possible. */
0b5383eb 5097 if (rs6000_darwin64_abi
594a51fe 5098 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
5099 && int_size_in_bytes (type) > 0)
5100 {
5101 CUMULATIVE_ARGS valcum;
5102 rtx valret;
5103
5104 valcum.words = 0;
5105 valcum.fregno = FP_ARG_MIN_REG;
5106 valcum.vregno = ALTIVEC_ARG_MIN_REG;
5107 /* Do a trial code generation as if this were going to be passed
5108 as an argument; if any part goes in memory, we return NULL. */
5109 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
5110 if (valret)
5111 return false;
5112 /* Otherwise fall through to more conventional ABI rules. */
5113 }
594a51fe 5114
c6e8c921 5115 if (AGGREGATE_TYPE_P (type)
df01da37 5116 && (aix_struct_return
c6e8c921
GK
5117 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
5118 return true;
b693336b 5119
bada2eb8
DE
5120 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5121 modes only exist for GCC vector types if -maltivec. */
5122 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
5123 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5124 return false;
5125
b693336b
PB
5126 /* Return synthetic vectors in memory. */
5127 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5128 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5129 {
5130 static bool warned_for_return_big_vectors = false;
5131 if (!warned_for_return_big_vectors)
5132 {
d4ee4d25 5133 warning (0, "GCC vector returned by reference: "
b693336b
PB
5134 "non-standard ABI extension with no compatibility guarantee");
5135 warned_for_return_big_vectors = true;
5136 }
5137 return true;
5138 }
5139
602ea4d3 5140 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 5141 return true;
ad630bef 5142
c6e8c921
GK
5143 return false;
5144}
5145
4697a36c
MM
5146/* Initialize a variable CUM of type CUMULATIVE_ARGS
5147 for a call to a function whose data type is FNTYPE.
5148 For a library call, FNTYPE is 0.
5149
5150 For incoming args we set the number of arguments in the prototype large
1c20ae99 5151 so we never return a PARALLEL. */
4697a36c
MM
5152
5153void
f676971a 5154init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
5155 rtx libname ATTRIBUTE_UNUSED, int incoming,
5156 int libcall, int n_named_args)
4697a36c
MM
5157{
5158 static CUMULATIVE_ARGS zero_cumulative;
5159
5160 *cum = zero_cumulative;
5161 cum->words = 0;
5162 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 5163 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 5164 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
5165 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5166 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 5167 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
5168 cum->stdarg = fntype
5169 && (TYPE_ARG_TYPES (fntype) != 0
5170 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5171 != void_type_node));
4697a36c 5172
0f6937fe
AM
5173 cum->nargs_prototype = 0;
5174 if (incoming || cum->prototype)
5175 cum->nargs_prototype = n_named_args;
4697a36c 5176
a5c76ee6 5177 /* Check for a longcall attribute. */
3eb4e360
AM
5178 if ((!fntype && rs6000_default_long_calls)
5179 || (fntype
5180 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5181 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5182 cum->call_cookie |= CALL_LONG;
6a4cee5f 5183
4697a36c
MM
5184 if (TARGET_DEBUG_ARG)
5185 {
5186 fprintf (stderr, "\ninit_cumulative_args:");
5187 if (fntype)
5188 {
5189 tree ret_type = TREE_TYPE (fntype);
5190 fprintf (stderr, " ret code = %s,",
5191 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5192 }
5193
6a4cee5f
MM
5194 if (cum->call_cookie & CALL_LONG)
5195 fprintf (stderr, " longcall,");
5196
4697a36c
MM
5197 fprintf (stderr, " proto = %d, nargs = %d\n",
5198 cum->prototype, cum->nargs_prototype);
5199 }
f676971a 5200
c4ad648e
AM
5201 if (fntype
5202 && !TARGET_ALTIVEC
5203 && TARGET_ALTIVEC_ABI
5204 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5205 {
c85ce869 5206 error ("cannot return value in vector register because"
c4ad648e 5207 " altivec instructions are disabled, use -maltivec"
c85ce869 5208 " to enable them");
c4ad648e 5209 }
4697a36c
MM
5210}
5211\f
fe984136
RH
5212/* Return true if TYPE must be passed on the stack and not in registers. */
5213
5214static bool
586de218 5215rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
5216{
5217 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5218 return must_pass_in_stack_var_size (mode, type);
5219 else
5220 return must_pass_in_stack_var_size_or_pad (mode, type);
5221}
5222
c229cba9
DE
5223/* If defined, a C expression which determines whether, and in which
5224 direction, to pad out an argument with extra space. The value
5225 should be of type `enum direction': either `upward' to pad above
5226 the argument, `downward' to pad below, or `none' to inhibit
5227 padding.
5228
5229 For the AIX ABI structs are always stored left shifted in their
5230 argument slot. */
5231
9ebbca7d 5232enum direction
586de218 5233function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 5234{
6e985040
AM
5235#ifndef AGGREGATE_PADDING_FIXED
5236#define AGGREGATE_PADDING_FIXED 0
5237#endif
5238#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5239#define AGGREGATES_PAD_UPWARD_ALWAYS 0
5240#endif
5241
5242 if (!AGGREGATE_PADDING_FIXED)
5243 {
5244 /* GCC used to pass structures of the same size as integer types as
5245 if they were in fact integers, ignoring FUNCTION_ARG_PADDING;
19525b57 5246 i.e. structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
5247 passed padded downward, except that -mstrict-align further
5248 muddied the water in that multi-component structures of 2 and 4
5249 bytes in size were passed padded upward.
5250
5251 The following arranges for best compatibility with previous
5252 versions of gcc, but removes the -mstrict-align dependency. */
5253 if (BYTES_BIG_ENDIAN)
5254 {
5255 HOST_WIDE_INT size = 0;
5256
5257 if (mode == BLKmode)
5258 {
5259 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5260 size = int_size_in_bytes (type);
5261 }
5262 else
5263 size = GET_MODE_SIZE (mode);
5264
5265 if (size == 1 || size == 2 || size == 4)
5266 return downward;
5267 }
5268 return upward;
5269 }
5270
5271 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5272 {
5273 if (type != 0 && AGGREGATE_TYPE_P (type))
5274 return upward;
5275 }
c229cba9 5276
d3704c46
KH
5277 /* Fall back to the default. */
5278 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
5279}
5280
b6c9286a 5281/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 5282 of an argument with the specified mode and type. If it is not defined,
b6c9286a 5283 PARM_BOUNDARY is used for all arguments.
f676971a 5284
84e9ad15
AM
5285 V.4 wants long longs and doubles to be double word aligned. Just
5286 testing the mode size is a boneheaded way to do this as it means
5287 that other types such as complex int are also double word aligned.
5288 However, we're stuck with this because changing the ABI might break
5289 existing library interfaces.
5290
b693336b
PB
5291 Doubleword align SPE vectors.
5292 Quadword align Altivec vectors.
5293 Quadword align large synthetic vector types. */
b6c9286a
MM
5294
5295int
b693336b 5296function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5297{
84e9ad15
AM
5298 if (DEFAULT_ABI == ABI_V4
5299 && (GET_MODE_SIZE (mode) == 8
5300 || (TARGET_HARD_FLOAT
5301 && TARGET_FPRS
7393f7f8 5302 && (mode == TFmode || mode == TDmode))))
4ed78545 5303 return 64;
ad630bef
DE
5304 else if (SPE_VECTOR_MODE (mode)
5305 || (type && TREE_CODE (type) == VECTOR_TYPE
5306 && int_size_in_bytes (type) >= 8
5307 && int_size_in_bytes (type) < 16))
e1f83b4d 5308 return 64;
ad630bef
DE
5309 else if (ALTIVEC_VECTOR_MODE (mode)
5310 || (type && TREE_CODE (type) == VECTOR_TYPE
5311 && int_size_in_bytes (type) >= 16))
0ac081f6 5312 return 128;
0b5383eb
DJ
5313 else if (rs6000_darwin64_abi && mode == BLKmode
5314 && type && TYPE_ALIGN (type) > 64)
5315 return 128;
9ebbca7d 5316 else
b6c9286a 5317 return PARM_BOUNDARY;
b6c9286a 5318}
c53bdcf5 5319
294bd182
AM
5320/* For a function parm of MODE and TYPE, return the starting word in
5321 the parameter area. NWORDS of the parameter area are already used. */
5322
5323static unsigned int
5324rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5325{
5326 unsigned int align;
5327 unsigned int parm_offset;
5328
5329 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5330 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
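 /* Round parm_offset + nwords up to the next multiple of align + 1
 words, then drop parm_offset again, so the result is relative to
 the parameter area but aligned with respect to the whole stack
 frame. E.g. under V.4 (parm_offset 2), a doubleword-aligned arg
 (align 1) with nwords == 1 starts at word 2, not word 1. */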
5331 return nwords + (-(parm_offset + nwords) & align);
5332}
5333
c53bdcf5
AM
5334/* Compute the size (in words) of a function argument. */
5335
5336static unsigned long
5337rs6000_arg_size (enum machine_mode mode, tree type)
5338{
5339 unsigned long size;
5340
5341 if (mode != BLKmode)
5342 size = GET_MODE_SIZE (mode);
5343 else
5344 size = int_size_in_bytes (type);
5345
5346 if (TARGET_32BIT)
5347 return (size + 3) >> 2;
5348 else
5349 return (size + 7) >> 3;
5350}
b6c9286a 5351\f
0b5383eb 5352/* Use this to flush pending int fields. */
594a51fe
SS
5353
5354static void
0b5383eb
DJ
5355rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5356 HOST_WIDE_INT bitpos)
594a51fe 5357{
0b5383eb
DJ
5358 unsigned int startbit, endbit;
5359 int intregs, intoffset;
5360 enum machine_mode mode;
594a51fe 5361
0b5383eb
DJ
5362 if (cum->intoffset == -1)
5363 return;
594a51fe 5364
0b5383eb
DJ
5365 intoffset = cum->intoffset;
5366 cum->intoffset = -1;
5367
5368 if (intoffset % BITS_PER_WORD != 0)
5369 {
5370 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5371 MODE_INT, 0);
5372 if (mode == BLKmode)
594a51fe 5373 {
0b5383eb
DJ
5374 /* We couldn't find an appropriate mode, which happens,
5375 e.g., in packed structs when there are 3 bytes to load.
5376 Back intoffset back to the beginning of the word in this
5377 case. */
5378 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5379 }
594a51fe 5380 }
0b5383eb
DJ
5381
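 /* Round intoffset down and bitpos up to word boundaries; the pending
 integer fields consume that many whole words of the argument area. */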
5382 startbit = intoffset & -BITS_PER_WORD;
5383 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5384 intregs = (endbit - startbit) / BITS_PER_WORD;
5385 cum->words += intregs;
5386}
5387
5388/* The darwin64 ABI calls for us to recurse down through structs,
5389 looking for elements passed in registers. Unfortunately, we have
5390 to track int register count here also because of misalignments
5391 in powerpc alignment mode. */
5392
5393static void
5394rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5395 tree type,
5396 HOST_WIDE_INT startbitpos)
5397{
5398 tree f;
5399
5400 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5401 if (TREE_CODE (f) == FIELD_DECL)
5402 {
5403 HOST_WIDE_INT bitpos = startbitpos;
5404 tree ftype = TREE_TYPE (f);
70fb00df
AP
5405 enum machine_mode mode;
5406 if (ftype == error_mark_node)
5407 continue;
5408 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5409
5410 if (DECL_SIZE (f) != 0
5411 && host_integerp (bit_position (f), 1))
5412 bitpos += int_bit_position (f);
5413
5414 /* ??? FIXME: else assume zero offset. */
5415
5416 if (TREE_CODE (ftype) == RECORD_TYPE)
5417 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5418 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5419 {
5420 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5421 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5422 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5423 }
5424 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5425 {
5426 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5427 cum->vregno++;
5428 cum->words += 2;
5429 }
5430 else if (cum->intoffset == -1)
5431 cum->intoffset = bitpos;
5432 }
594a51fe
SS
5433}
5434
4697a36c
MM
5435/* Update the data in CUM to advance over an argument
5436 of mode MODE and data type TYPE.
b2d04ecf
AM
5437 (TYPE is null for libcalls where that information may not be available.)
5438
5439 Note that for args passed by reference, function_arg will be called
5440 with MODE and TYPE set to that of the pointer to the arg, not the arg
5441 itself. */
4697a36c
MM
5442
5443void
f676971a 5444function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5445 tree type, int named, int depth)
4697a36c 5446{
0b5383eb
DJ
5447 int size;
5448
594a51fe
SS
5449 /* Only tick off an argument if we're not recursing. */
5450 if (depth == 0)
5451 cum->nargs_prototype--;
4697a36c 5452
ad630bef
DE
5453 if (TARGET_ALTIVEC_ABI
5454 && (ALTIVEC_VECTOR_MODE (mode)
5455 || (type && TREE_CODE (type) == VECTOR_TYPE
5456 && int_size_in_bytes (type) == 16)))
0ac081f6 5457 {
4ed78545
AM
5458 bool stack = false;
5459
2858f73a 5460 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5461 {
6d0ef01e
HP
5462 cum->vregno++;
5463 if (!TARGET_ALTIVEC)
c85ce869 5464 error ("cannot pass argument in vector register because"
6d0ef01e 5465 " altivec instructions are disabled, use -maltivec"
c85ce869 5466 " to enable them");
4ed78545
AM
5467
5468 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5469 even if it is going to be passed in a vector register.
4ed78545
AM
5470 Darwin does the same for variable-argument functions. */
5471 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5472 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5473 stack = true;
6d0ef01e 5474 }
4ed78545
AM
5475 else
5476 stack = true;
5477
5478 if (stack)
c4ad648e 5479 {
a594a19c 5480 int align;
f676971a 5481
2858f73a
GK
5482 /* Vector parameters must be 16-byte aligned. This places
5483 them at 2 mod 4 in terms of words in 32-bit mode, since
5484 the parameter save area starts at offset 24 from the
5485 stack. In 64-bit mode, they just have to start on an
5486 even word, since the parameter save area is 16-byte
5487 aligned. Space for GPRs is reserved even if the argument
5488 will be passed in memory. */
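 /* E.g. with cum->words == 0 the padding below is 2, so the vector
 starts at word 2 of the save area, i.e. byte offset 24 + 8 == 32,
 which is 16-byte aligned. */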
5489 if (TARGET_32BIT)
4ed78545 5490 align = (2 - cum->words) & 3;
2858f73a
GK
5491 else
5492 align = cum->words & 1;
c53bdcf5 5493 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5494
a594a19c
GK
5495 if (TARGET_DEBUG_ARG)
5496 {
f676971a 5497 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5498 cum->words, align);
5499 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5500 cum->nargs_prototype, cum->prototype,
2858f73a 5501 GET_MODE_NAME (mode));
a594a19c
GK
5502 }
5503 }
0ac081f6 5504 }
a4b0320c 5505 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5506 && !cum->stdarg
5507 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5508 cum->sysv_gregno++;
594a51fe
SS
5509
5510 else if (rs6000_darwin64_abi
5511 && mode == BLKmode
0b5383eb
DJ
5512 && TREE_CODE (type) == RECORD_TYPE
5513 && (size = int_size_in_bytes (type)) > 0)
5514 {
5515 /* Variable sized types have size == -1 and are
5516 treated as if consisting entirely of ints.
5517 Pad to 16 byte boundary if needed. */
5518 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5519 && (cum->words % 2) != 0)
5520 cum->words++;
5521 /* For varargs, we can just go up by the size of the struct. */
5522 if (!named)
5523 cum->words += (size + 7) / 8;
5524 else
5525 {
5526 /* It is tempting to say int register count just goes up by
5527 sizeof(type)/8, but this is wrong in a case such as
5528 { int; double; int; } [powerpc alignment]. We have to
5529 grovel through the fields for these too. */
5530 cum->intoffset = 0;
5531 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5532 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5533 size * BITS_PER_UNIT);
5534 }
5535 }
f607bc57 5536 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5537 {
a3170dc6 5538 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5539 && (mode == SFmode || mode == DFmode
e41b2a33 5540 || mode == SDmode || mode == DDmode || mode == TDmode
602ea4d3 5541 || (mode == TFmode && !TARGET_IEEEQUAD)))
4697a36c 5542 {
2d83f070
JJ
5543 /* _Decimal128 must use an even/odd register pair. This assumes
5544 that the register number is odd when fregno is odd. */
5545 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5546 cum->fregno++;
5547
5548 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5549 <= FP_ARG_V4_MAX_REG)
602ea4d3 5550 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5551 else
5552 {
602ea4d3 5553 cum->fregno = FP_ARG_V4_MAX_REG + 1;
4d4447b5
PB
5554 if (mode == DFmode || mode == TFmode
5555 || mode == DDmode || mode == TDmode)
c4ad648e 5556 cum->words += cum->words & 1;
c53bdcf5 5557 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5558 }
4697a36c 5559 }
4cc833b7
RH
5560 else
5561 {
b2d04ecf 5562 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5563 int gregno = cum->sysv_gregno;
5564
4ed78545
AM
5565 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5566 (r7,r8) or (r9,r10). As does any other 2 word item such
5567 as complex int due to a historical mistake. */
5568 if (n_words == 2)
5569 gregno += (1 - gregno) & 1;
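 /* (1 - gregno) & 1 is 1 for an even register number and 0 for an
 odd one, so this advances to the next odd GPR (r3, r5, r7 or r9),
 the first register of a pair. */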
4cc833b7 5570
4ed78545 5571 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5572 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5573 {
4ed78545
AM
5574 /* Long long and SPE vectors are aligned on the stack.
5575 So are other 2 word items such as complex int due to
5576 a historical mistake. */
4cc833b7
RH
5577 if (n_words == 2)
5578 cum->words += cum->words & 1;
5579 cum->words += n_words;
5580 }
4697a36c 5581
4cc833b7
RH
5582 /* Note: continuing to accumulate gregno past when we've started
5583 spilling to the stack indicates the fact that we've started
5584 spilling to the stack to expand_builtin_saveregs. */
5585 cum->sysv_gregno = gregno + n_words;
5586 }
4697a36c 5587
4cc833b7
RH
5588 if (TARGET_DEBUG_ARG)
5589 {
5590 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5591 cum->words, cum->fregno);
5592 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5593 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5594 fprintf (stderr, "mode = %4s, named = %d\n",
5595 GET_MODE_NAME (mode), named);
5596 }
4697a36c
MM
5597 }
5598 else
4cc833b7 5599 {
b2d04ecf 5600 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5601 int start_words = cum->words;
5602 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5603
294bd182 5604 cum->words = align_words + n_words;
4697a36c 5605
ebb109ad 5606 if (SCALAR_FLOAT_MODE_P (mode)
a3170dc6 5607 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5608 {
5609 /* _Decimal128 must be passed in an even/odd float register pair.
5610 This assumes that the register number is odd when fregno is
5611 odd. */
5612 if (mode == TDmode && (cum->fregno % 2) == 1)
5613 cum->fregno++;
5614 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5615 }
4cc833b7
RH
5616
5617 if (TARGET_DEBUG_ARG)
5618 {
5619 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5620 cum->words, cum->fregno);
5621 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5622 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5623 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5624 named, align_words - start_words, depth);
4cc833b7
RH
5625 }
5626 }
4697a36c 5627}
a6c9bed4 5628
f82f556d
AH
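/* Build a PARALLEL describing a value passed in GPRs starting at GREGNO:
 each EXPR_LIST pairs a DImode register (gregno, gregno + 2, ...) with
 the byte offset of that 8-byte chunk within the value. */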
5629static rtx
5630spe_build_register_parallel (enum machine_mode mode, int gregno)
5631{
17caeff2 5632 rtx r1, r3, r5, r7;
f82f556d 5633
37409796 5634 switch (mode)
f82f556d 5635 {
37409796 5636 case DFmode:
4d4447b5 5637 case DDmode:
54b695e7
AH
5638 r1 = gen_rtx_REG (DImode, gregno);
5639 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5640 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5641
5642 case DCmode:
17caeff2 5643 case TFmode:
4d4447b5 5644 case TDmode:
54b695e7
AH
5645 r1 = gen_rtx_REG (DImode, gregno);
5646 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5647 r3 = gen_rtx_REG (DImode, gregno + 2);
5648 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5649 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5650
17caeff2
JM
5651 case TCmode:
5652 r1 = gen_rtx_REG (DImode, gregno);
5653 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5654 r3 = gen_rtx_REG (DImode, gregno + 2);
5655 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5656 r5 = gen_rtx_REG (DImode, gregno + 4);
5657 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5658 r7 = gen_rtx_REG (DImode, gregno + 6);
5659 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5660 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5661
37409796
NS
5662 default:
5663 gcc_unreachable ();
f82f556d 5664 }
f82f556d 5665}
b78d48dd 5666
f82f556d 5667/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5668static rtx
f676971a 5669rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5670 tree type)
a6c9bed4 5671{
f82f556d
AH
5672 int gregno = cum->sysv_gregno;
5673
5674 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5675 doubles are passed and returned in a pair of GPRs for ABI compatibility. */
4d4447b5
PB
5676 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
5677 || mode == DDmode || mode == TDmode
5678 || mode == DCmode || mode == TCmode))
f82f556d 5679 {
b5870bee
AH
5680 int n_words = rs6000_arg_size (mode, type);
5681
f82f556d 5682 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4d4447b5 5683 if (mode == DFmode || mode == DDmode)
b5870bee 5684 gregno += (1 - gregno) & 1;
f82f556d 5685
b5870bee
AH
5686 /* Multi-reg args are not split between registers and stack. */
5687 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5688 return NULL_RTX;
5689
5690 return spe_build_register_parallel (mode, gregno);
5691 }
a6c9bed4
AH
5692 if (cum->stdarg)
5693 {
c53bdcf5 5694 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5695
5696 /* SPE vectors are put in odd registers. */
5697 if (n_words == 2 && (gregno & 1) == 0)
5698 gregno += 1;
5699
5700 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5701 {
5702 rtx r1, r2;
5703 enum machine_mode m = SImode;
5704
5705 r1 = gen_rtx_REG (m, gregno);
5706 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5707 r2 = gen_rtx_REG (m, gregno + 1);
5708 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5709 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5710 }
5711 else
b78d48dd 5712 return NULL_RTX;
a6c9bed4
AH
5713 }
5714 else
5715 {
f82f556d
AH
5716 if (gregno <= GP_ARG_MAX_REG)
5717 return gen_rtx_REG (mode, gregno);
a6c9bed4 5718 else
b78d48dd 5719 return NULL_RTX;
a6c9bed4
AH
5720 }
5721}
5722
0b5383eb
DJ
5723/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5724 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5725
0b5383eb 5726static void
bb8df8a6 5727rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5728 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5729{
0b5383eb
DJ
5730 enum machine_mode mode;
5731 unsigned int regno;
5732 unsigned int startbit, endbit;
5733 int this_regno, intregs, intoffset;
5734 rtx reg;
594a51fe 5735
0b5383eb
DJ
5736 if (cum->intoffset == -1)
5737 return;
5738
5739 intoffset = cum->intoffset;
5740 cum->intoffset = -1;
5741
5742 /* If this is the trailing part of a word, try to only load that
5743 much into the register. Otherwise load the whole register. Note
5744 that in the latter case we may pick up unwanted bits. It's not a
5745 problem at the moment, but we may wish to revisit this. */
5746
5747 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5748 {
0b5383eb
DJ
5749 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5750 MODE_INT, 0);
5751 if (mode == BLKmode)
5752 {
5753 /* We couldn't find an appropriate mode, which happens,
5754 e.g., in packed structs when there are 3 bytes to load.
5755 Back intoffset back to the beginning of the word in this
5756 case. */
5757 intoffset = intoffset & -BITS_PER_WORD;
5758 mode = word_mode;
5759 }
5760 }
5761 else
5762 mode = word_mode;
5763
5764 startbit = intoffset & -BITS_PER_WORD;
5765 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5766 intregs = (endbit - startbit) / BITS_PER_WORD;
5767 this_regno = cum->words + intoffset / BITS_PER_WORD;
5768
5769 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
5770 cum->use_stack = 1;
bb8df8a6 5771
0b5383eb
DJ
5772 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
5773 if (intregs <= 0)
5774 return;
5775
5776 intoffset /= BITS_PER_UNIT;
5777 do
5778 {
5779 regno = GP_ARG_MIN_REG + this_regno;
5780 reg = gen_rtx_REG (mode, regno);
5781 rvec[(*k)++] =
5782 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5783
5784 this_regno += 1;
5785 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
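 /* ORing in UNITS_PER_WORD-1 and adding 1 advances intoffset to the
 next word boundary; any further pieces are whole word_mode words. */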
5786 mode = word_mode;
5787 intregs -= 1;
5788 }
5789 while (intregs > 0);
5790}
5791
5792/* Recursive workhorse for the following. */
5793
5794static void
586de218 5795rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
5796 HOST_WIDE_INT startbitpos, rtx rvec[],
5797 int *k)
5798{
5799 tree f;
5800
5801 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5802 if (TREE_CODE (f) == FIELD_DECL)
5803 {
5804 HOST_WIDE_INT bitpos = startbitpos;
5805 tree ftype = TREE_TYPE (f);
70fb00df
AP
5806 enum machine_mode mode;
5807 if (ftype == error_mark_node)
5808 continue;
5809 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5810
5811 if (DECL_SIZE (f) != 0
5812 && host_integerp (bit_position (f), 1))
5813 bitpos += int_bit_position (f);
5814
5815 /* ??? FIXME: else assume zero offset. */
5816
5817 if (TREE_CODE (ftype) == RECORD_TYPE)
5818 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5819 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 5820 {
0b5383eb
DJ
5821#if 0
5822 switch (mode)
594a51fe 5823 {
0b5383eb
DJ
5824 case SCmode: mode = SFmode; break;
5825 case DCmode: mode = DFmode; break;
5826 case TCmode: mode = TFmode; break;
5827 default: break;
594a51fe 5828 }
0b5383eb
DJ
5829#endif
5830 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5831 rvec[(*k)++]
bb8df8a6 5832 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
5833 gen_rtx_REG (mode, cum->fregno++),
5834 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 5835 if (mode == TFmode || mode == TDmode)
0b5383eb 5836 cum->fregno++;
594a51fe 5837 }
0b5383eb
DJ
5838 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5839 {
5840 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5841 rvec[(*k)++]
bb8df8a6
EC
5842 = gen_rtx_EXPR_LIST (VOIDmode,
5843 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
5844 GEN_INT (bitpos / BITS_PER_UNIT));
5845 }
5846 else if (cum->intoffset == -1)
5847 cum->intoffset = bitpos;
5848 }
5849}
594a51fe 5850
0b5383eb
DJ
5851/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5852 the register(s) to be used for each field and subfield of a struct
5853 being passed by value, along with the offset of where the
5854 register's value may be found in the block. FP fields go in FP
5855 register, vector fields go in vector registers, and everything
bb8df8a6 5856 else goes in int registers, packed as in memory.
8ff40a74 5857
0b5383eb
DJ
5858 This code is also used for function return values. RETVAL indicates
5859 whether this is the case.
8ff40a74 5860
a4d05547 5861 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 5862 calling convention. */
594a51fe 5863
0b5383eb 5864static rtx
586de218 5865rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
5866 int named, bool retval)
5867{
5868 rtx rvec[FIRST_PSEUDO_REGISTER];
5869 int k = 1, kbase = 1;
5870 HOST_WIDE_INT typesize = int_size_in_bytes (type);
5871 /* This is a copy; modifications are not visible to our caller. */
5872 CUMULATIVE_ARGS copy_cum = *orig_cum;
5873 CUMULATIVE_ARGS *cum = &copy_cum;
5874
5875 /* Pad to 16 byte boundary if needed. */
5876 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5877 && (cum->words % 2) != 0)
5878 cum->words++;
5879
5880 cum->intoffset = 0;
5881 cum->use_stack = 0;
5882 cum->named = named;
5883
5884 /* Put entries into rvec[] for individual FP and vector fields, and
5885 for the chunks of memory that go in int regs. Note we start at
5886 element 1; 0 is reserved for an indication of using memory, and
5887 may or may not be filled in below. */
5888 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
5889 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
5890
5891 /* If any part of the struct went on the stack, put all of it there.
5892 This hack is because the generic code for
5893 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
5894 parts of the struct are not at the beginning. */
5895 if (cum->use_stack)
5896 {
5897 if (retval)
5898 return NULL_RTX; /* doesn't go in registers at all */
5899 kbase = 0;
5900 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5901 }
5902 if (k > 1 || cum->use_stack)
5903 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
5904 else
5905 return NULL_RTX;
5906}
5907
b78d48dd
FJ
5908/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
5909
5910static rtx
ec6376ab 5911rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 5912{
ec6376ab
AM
5913 int n_units;
5914 int i, k;
5915 rtx rvec[GP_ARG_NUM_REG + 1];
5916
5917 if (align_words >= GP_ARG_NUM_REG)
5918 return NULL_RTX;
5919
5920 n_units = rs6000_arg_size (mode, type);
5921
5922 /* Optimize the simple case where the arg fits in one gpr, except in
5923 the case of BLKmode due to assign_parms assuming that registers are
5924 BITS_PER_WORD wide. */
5925 if (n_units == 0
5926 || (n_units == 1 && mode != BLKmode))
5927 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5928
5929 k = 0;
5930 if (align_words + n_units > GP_ARG_NUM_REG)
5931 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5932 using a magic NULL_RTX component.
79773478
AM
5933 This is not strictly correct. Only some of the arg belongs in
5934 memory, not all of it. However, the normal scheme using
5935 function_arg_partial_nregs can result in unusual subregs, eg.
5936 (subreg:SI (reg:DF) 4), which are not handled well. The code to
5937 store the whole arg to memory is often more efficient than code
5938 to store pieces, and we know that space is available in the right
5939 place for the whole arg. */
ec6376ab
AM
5940 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5941
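 /* Describe the part that does fit in GPRs as a series of SImode
 registers, one 4-byte piece per remaining argument register. */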
5942 i = 0;
5943 do
36a454e1 5944 {
ec6376ab
AM
5945 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5946 rtx off = GEN_INT (i++ * 4);
5947 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 5948 }
ec6376ab
AM
5949 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5950
5951 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
5952}
5953
4697a36c
MM
5954/* Determine where to put an argument to a function.
5955 Value is zero to push the argument on the stack,
5956 or a hard register in which to store the argument.
5957
5958 MODE is the argument's machine mode.
5959 TYPE is the data type of the argument (as a tree).
5960 This is null for libcalls where that information may
5961 not be available.
5962 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
5963 the preceding args and about the function being called. It is
5964 not modified in this routine.
4697a36c
MM
5965 NAMED is nonzero if this argument is a named parameter
5966 (otherwise it is an extra parameter matching an ellipsis).
5967
5968 On RS/6000 the first eight words of non-FP are normally in registers
5969 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
5970 Under V.4, the first 8 FP args are in registers.
5971
5972 If this is floating-point and no prototype is specified, we use
5973 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 5974 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 5975 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
5976 doesn't support PARALLEL anyway.
5977
5978 Note that for args passed by reference, function_arg will be called
5979 with MODE and TYPE set to that of the pointer to the arg, not the arg
5980 itself. */
4697a36c 5981
9390387d 5982rtx
f676971a 5983function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5984 tree type, int named)
4697a36c 5985{
4cc833b7 5986 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 5987
a4f6c312
SS
5988 /* Return a marker to indicate whether CR1 needs to set or clear the
5989 bit that V.4 uses to say fp args were passed in registers.
5990 Assume that we don't need the marker for software floating point,
5991 or compiler generated library calls. */
4697a36c
MM
5992 if (mode == VOIDmode)
5993 {
f607bc57 5994 if (abi == ABI_V4
b9599e46 5995 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
5996 && (cum->stdarg
5997 || (cum->nargs_prototype < 0
5998 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 5999 {
a3170dc6
AH
6000 /* For the SPE, we need to crxor CR6 always. */
6001 if (TARGET_SPE_ABI)
6002 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
6003 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
6004 return GEN_INT (cum->call_cookie
6005 | ((cum->fregno == FP_ARG_MIN_REG)
6006 ? CALL_V4_SET_FP_ARGS
6007 : CALL_V4_CLEAR_FP_ARGS));
7509c759 6008 }
4697a36c 6009
7509c759 6010 return GEN_INT (cum->call_cookie);
4697a36c
MM
6011 }
6012
0b5383eb
DJ
6013 if (rs6000_darwin64_abi && mode == BLKmode
6014 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 6015 {
0b5383eb 6016 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
6017 if (rslt != NULL_RTX)
6018 return rslt;
6019 /* Else fall through to usual handling. */
6020 }
6021
2858f73a 6022 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
6023 if (TARGET_64BIT && ! cum->prototype)
6024 {
c4ad648e
AM
6025 /* Vector parameters get passed in vector register
6026 and also in GPRs or memory, in absence of prototype. */
6027 int align_words;
6028 rtx slot;
6029 align_words = (cum->words + 1) & ~1;
6030
6031 if (align_words >= GP_ARG_NUM_REG)
6032 {
6033 slot = NULL_RTX;
6034 }
6035 else
6036 {
6037 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6038 }
6039 return gen_rtx_PARALLEL (mode,
6040 gen_rtvec (2,
6041 gen_rtx_EXPR_LIST (VOIDmode,
6042 slot, const0_rtx),
6043 gen_rtx_EXPR_LIST (VOIDmode,
6044 gen_rtx_REG (mode, cum->vregno),
6045 const0_rtx)));
c72d6c26
HP
6046 }
6047 else
6048 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
6049 else if (TARGET_ALTIVEC_ABI
6050 && (ALTIVEC_VECTOR_MODE (mode)
6051 || (type && TREE_CODE (type) == VECTOR_TYPE
6052 && int_size_in_bytes (type) == 16)))
0ac081f6 6053 {
2858f73a 6054 if (named || abi == ABI_V4)
a594a19c 6055 return NULL_RTX;
0ac081f6 6056 else
a594a19c
GK
6057 {
6058 /* Vector parameters to varargs functions under AIX or Darwin
6059 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
6060 int align, align_words, n_words;
6061 enum machine_mode part_mode;
a594a19c
GK
6062
6063 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
6064 2 mod 4 in terms of words in 32-bit mode, since the parameter
6065 save area starts at offset 24 from the stack. In 64-bit mode,
6066 they just have to start on an even word, since the parameter
6067 save area is 16-byte aligned. */
6068 if (TARGET_32BIT)
4ed78545 6069 align = (2 - cum->words) & 3;
2858f73a
GK
6070 else
6071 align = cum->words & 1;
a594a19c
GK
6072 align_words = cum->words + align;
6073
6074 /* Out of registers? Memory, then. */
6075 if (align_words >= GP_ARG_NUM_REG)
6076 return NULL_RTX;
ec6376ab
AM
6077
6078 if (TARGET_32BIT && TARGET_POWERPC64)
6079 return rs6000_mixed_function_arg (mode, type, align_words);
6080
2858f73a
GK
6081 /* The vector value goes in GPRs. Only the part of the
6082 value in GPRs is reported here. */
ec6376ab
AM
6083 part_mode = mode;
6084 n_words = rs6000_arg_size (mode, type);
6085 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 6086 /* Fortunately, there are only two possibilities: the value
2858f73a
GK
6087 is either wholly in GPRs or half in GPRs and half not. */
6088 part_mode = DImode;
ec6376ab
AM
6089
6090 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 6091 }
0ac081f6 6092 }
f82f556d
AH
6093 else if (TARGET_SPE_ABI && TARGET_SPE
6094 && (SPE_VECTOR_MODE (mode)
18f63bfa 6095 || (TARGET_E500_DOUBLE && (mode == DFmode
7393f7f8 6096 || mode == DDmode
17caeff2
JM
6097 || mode == DCmode
6098 || mode == TFmode
7393f7f8 6099 || mode == TDmode
17caeff2 6100 || mode == TCmode))))
a6c9bed4 6101 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 6102
f607bc57 6103 else if (abi == ABI_V4)
4697a36c 6104 {
a3170dc6 6105 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 6106 && (mode == SFmode || mode == DFmode
7393f7f8 6107 || (mode == TFmode && !TARGET_IEEEQUAD)
e41b2a33 6108 || mode == SDmode || mode == DDmode || mode == TDmode))
4cc833b7 6109 {
2d83f070
JJ
6110 /* _Decimal128 must use an even/odd register pair. This assumes
6111 that the register number is odd when fregno is odd. */
6112 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
6113 cum->fregno++;
6114
6115 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
6116 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
6117 return gen_rtx_REG (mode, cum->fregno);
6118 else
b78d48dd 6119 return NULL_RTX;
4cc833b7
RH
6120 }
6121 else
6122 {
b2d04ecf 6123 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
6124 int gregno = cum->sysv_gregno;
6125
4ed78545
AM
6126 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
6127 (r7,r8) or (r9,r10). As does any other 2 word item such
6128 as complex int due to a historical mistake. */
6129 if (n_words == 2)
6130 gregno += (1 - gregno) & 1;
4cc833b7 6131
4ed78545 6132 /* Multi-reg args are not split between registers and stack. */
ec6376ab 6133 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 6134 return NULL_RTX;
ec6376ab
AM
6135
6136 if (TARGET_32BIT && TARGET_POWERPC64)
6137 return rs6000_mixed_function_arg (mode, type,
6138 gregno - GP_ARG_MIN_REG);
6139 return gen_rtx_REG (mode, gregno);
4cc833b7 6140 }
4697a36c 6141 }
4cc833b7
RH
6142 else
6143 {
294bd182 6144 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 6145
2d83f070
JJ
6146 /* _Decimal128 must be passed in an even/odd float register pair.
6147 This assumes that the register number is odd when fregno is odd. */
6148 if (mode == TDmode && (cum->fregno % 2) == 1)
6149 cum->fregno++;
6150
2858f73a 6151 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 6152 {
ec6376ab
AM
6153 rtx rvec[GP_ARG_NUM_REG + 1];
6154 rtx r;
6155 int k;
c53bdcf5
AM
6156 bool needs_psave;
6157 enum machine_mode fmode = mode;
c53bdcf5
AM
6158 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6159
6160 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6161 {
c53bdcf5
AM
6162 /* Currently, we only ever need one reg here because complex
6163 doubles are split. */
7393f7f8
BE
6164 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6165 && (fmode == TFmode || fmode == TDmode));
ec6376ab 6166
7393f7f8
BE
6167 /* Long double or _Decimal128 split over regs and memory. */
6168 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 6169 }
c53bdcf5
AM
6170
6171 /* Do we also need to pass this arg in the parameter save
6172 area? */
6173 needs_psave = (type
6174 && (cum->nargs_prototype <= 0
6175 || (DEFAULT_ABI == ABI_AIX
de17c25f 6176 && TARGET_XL_COMPAT
c53bdcf5
AM
6177 && align_words >= GP_ARG_NUM_REG)));
6178
6179 if (!needs_psave && mode == fmode)
ec6376ab 6180 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 6181
ec6376ab 6182 k = 0;
c53bdcf5
AM
6183 if (needs_psave)
6184 {
ec6376ab 6185 /* Describe the part that goes in gprs or the stack.
c53bdcf5 6186 This piece must come first, before the fprs. */
c53bdcf5
AM
6187 if (align_words < GP_ARG_NUM_REG)
6188 {
6189 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
6190
6191 if (align_words + n_words > GP_ARG_NUM_REG
6192 || (TARGET_32BIT && TARGET_POWERPC64))
6193 {
6194 /* If this is partially on the stack, then we only
6195 include the portion actually in registers here. */
6196 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6197 rtx off;
79773478
AM
6198 int i = 0;
6199 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
6200 /* Not all of the arg fits in gprs. Say that it
6201 goes in memory too, using a magic NULL_RTX
6202 component. Also see comment in
6203 rs6000_mixed_function_arg for why the normal
6204 function_arg_partial_nregs scheme doesn't work
6205 in this case. */
6206 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6207 const0_rtx);
ec6376ab
AM
6208 do
6209 {
6210 r = gen_rtx_REG (rmode,
6211 GP_ARG_MIN_REG + align_words);
2e6c9641 6212 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
6213 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6214 }
6215 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6216 }
6217 else
6218 {
6219 /* The whole arg fits in gprs. */
6220 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6221 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6222 }
c53bdcf5 6223 }
ec6376ab
AM
6224 else
6225 /* It's entirely in memory. */
6226 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
6227 }
6228
ec6376ab
AM
6229 /* Describe where this piece goes in the fprs. */
6230 r = gen_rtx_REG (fmode, cum->fregno);
6231 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6232
6233 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
6234 }
6235 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 6236 {
ec6376ab
AM
6237 if (TARGET_32BIT && TARGET_POWERPC64)
6238 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 6239
4eeca74f
AM
6240 if (mode == BLKmode)
6241 mode = Pmode;
6242
b2d04ecf
AM
6243 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6244 }
4cc833b7
RH
6245 else
6246 return NULL_RTX;
4697a36c 6247 }
4697a36c
MM
6248}
6249\f
ec6376ab 6250/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
6251 the number of bytes passed in registers. For args passed entirely in
6252 registers or entirely in memory, zero. When an arg is described by a
6253 PARALLEL, perhaps using more than one register type, this function
6254 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 6255
78a52f11
RH
6256static int
6257rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6258 tree type, bool named)
4697a36c 6259{
c53bdcf5 6260 int ret = 0;
ec6376ab 6261 int align_words;
c53bdcf5 6262
f607bc57 6263 if (DEFAULT_ABI == ABI_V4)
4697a36c 6264 return 0;
4697a36c 6265
c53bdcf5
AM
6266 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6267 && cum->nargs_prototype >= 0)
6268 return 0;
6269
0b5383eb
DJ
6270 /* In this complicated case we just disable the partial_nregs code. */
6271 if (rs6000_darwin64_abi && mode == BLKmode
6272 && TREE_CODE (type) == RECORD_TYPE
6273 && int_size_in_bytes (type) > 0)
6274 return 0;
6275
294bd182 6276 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6277
79773478
AM
6278 if (USE_FP_FOR_ARG_P (cum, mode, type))
6279 {
fb63c729
AM
6280 /* If we are passing this arg in the fixed parameter save area
6281 (gprs or memory) as well as fprs, then this function should
79773478
AM
6282 return the number of partial bytes passed in the parameter
6283 save area rather than partial bytes passed in fprs. */
6284 if (type
6285 && (cum->nargs_prototype <= 0
6286 || (DEFAULT_ABI == ABI_AIX
6287 && TARGET_XL_COMPAT
6288 && align_words >= GP_ARG_NUM_REG)))
6289 return 0;
6290 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6291 > FP_ARG_MAX_REG + 1)
ac7e839c 6292 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6293 else if (cum->nargs_prototype >= 0)
4697a36c
MM
6294 return 0;
6295 }
6296
ec6376ab
AM
6297 if (align_words < GP_ARG_NUM_REG
6298 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6299 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6300
c53bdcf5 6301 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6302 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6303
c53bdcf5 6304 return ret;
4697a36c
MM
6305}
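/* Worked example (illustration only, not from the original source),
   assuming a 32-bit target where GP_ARG_NUM_REG is 8 (r3..r10) and the
   ABI is not V.4: a 12-byte struct whose first word lands in the 7th
   argument word (align_words == 6) needs rs6000_arg_size == 3 words,
   so only GP_ARG_NUM_REG - align_words == 2 registers hold data and
   the function above returns 2 * 4 == 8 partial bytes; the remaining
   4 bytes go in the caller's parameter save area.  */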
6306\f
6307/* A C expression that indicates when an argument must be passed by
6308 reference. If nonzero for an argument, a copy of that argument is
6309 made in memory and a pointer to the argument is passed instead of
6310 the argument itself. The pointer is passed in whatever way is
6311 appropriate for passing a pointer to that type.
6312
b2d04ecf
AM
6313 Under V.4, aggregates and long double are passed by reference.
6314
6315 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6316 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
6317
6318 As an extension to all ABIs, variable sized types are passed by
6319 reference. */
4697a36c 6320
8cd5a4e0 6321static bool
f676971a 6322rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6323 enum machine_mode mode, const_tree type,
bada2eb8 6324 bool named ATTRIBUTE_UNUSED)
4697a36c 6325{
602ea4d3 6326 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
6327 {
6328 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6329 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6330 return 1;
6331 }
6332
6333 if (!type)
6334 return 0;
4697a36c 6335
bada2eb8
DE
6336 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6337 {
6338 if (TARGET_DEBUG_ARG)
6339 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6340 return 1;
6341 }
6342
6343 if (int_size_in_bytes (type) < 0)
6344 {
6345 if (TARGET_DEBUG_ARG)
6346 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6347 return 1;
6348 }
6349
6350 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6351 modes only exist for GCC vector types if -maltivec. */
6352 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6353 {
6354 if (TARGET_DEBUG_ARG)
6355 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6356 return 1;
6357 }
b693336b
PB
6358
6359 /* Pass synthetic vectors in memory. */
bada2eb8 6360 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6361 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6362 {
6363 static bool warned_for_pass_big_vectors = false;
6364 if (TARGET_DEBUG_ARG)
6365 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6366 if (!warned_for_pass_big_vectors)
6367 {
d4ee4d25 6368 warning (0, "GCC vector passed by reference: "
b693336b
PB
6369 "non-standard ABI extension with no compatibility guarantee");
6370 warned_for_pass_big_vectors = true;
6371 }
6372 return 1;
6373 }
6374
b2d04ecf 6375 return 0;
4697a36c 6376}
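/* Illustration only (not from the original source): under the rules
   above, a 32-bit compilation with -maltivec -mabi=no-altivec passes a
   "vector int" argument by reference (an AltiVec mode without the
   AltiVec ABI), and an SVR4 (ABI_V4) compilation passes any aggregate,
   e.g. "struct s { int a[4]; }", by reference, while a plain "double"
   or "int" is still passed by value.  */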
5985c7a6
FJ
6377
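/* Copy NREGS consecutive hard registers, starting at REGNO, into the
   memory block X, one word (SImode on 32-bit, DImode on 64-bit) at a
   time.  */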
6378static void
2d9db8eb 6379rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6380{
6381 int i;
6382 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6383
6384 if (nregs == 0)
6385 return;
6386
c4ad648e 6387 for (i = 0; i < nregs; i++)
5985c7a6 6388 {
9390387d 6389 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6390 if (reload_completed)
c4ad648e
AM
6391 {
6392 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6393 tem = NULL_RTX;
6394 else
6395 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6396 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6397 }
5985c7a6
FJ
6398 else
6399 tem = replace_equiv_address (tem, XEXP (tem, 0));
6400
37409796 6401 gcc_assert (tem);
5985c7a6
FJ
6402
6403 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6404 }
6405}
4697a36c
MM
6406\f
6407/* Perform any actions needed for a function that is receiving a
f676971a 6408 variable number of arguments.
4697a36c
MM
6409
6410 CUM is as above.
6411
6412 MODE and TYPE are the mode and type of the current parameter.
6413
6414 PRETEND_SIZE is a variable that should be set to the amount of stack
6415 that must be pushed by the prolog to pretend that our caller pushed
6416 it.
6417
6418 Normally, this macro will push all remaining incoming registers on the
6419 stack and set PRETEND_SIZE to the length of the registers pushed. */
6420
c6e8c921 6421static void
f676971a 6422setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
6423 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6424 int no_rtl)
4697a36c 6425{
4cc833b7
RH
6426 CUMULATIVE_ARGS next_cum;
6427 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6428 rtx save_area = NULL_RTX, mem;
4862826d
ILT
6429 int first_reg_offset;
6430 alias_set_type set;
4697a36c 6431
f31bf321 6432 /* Skip the last named argument. */
d34c5b80 6433 next_cum = *cum;
594a51fe 6434 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6435
f607bc57 6436 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6437 {
5b667039
JJ
6438 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6439
60e2d0ca 6440 if (! no_rtl)
5b667039
JJ
6441 {
6442 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6443 HOST_WIDE_INT offset = 0;
6444
6445 /* Try to optimize the size of the varargs save area.
6446 The ABI requires that ap.reg_save_area is doubleword
6447 aligned, but we don't need to allocate space for all
6448 the bytes, only for those to which we will actually save
6449 anything. */
6450 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6451 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6452 if (TARGET_HARD_FLOAT && TARGET_FPRS
6453 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6454 && cfun->va_list_fpr_size)
6455 {
6456 if (gpr_reg_num)
6457 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6458 * UNITS_PER_FP_WORD;
6459 if (cfun->va_list_fpr_size
6460 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6461 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6462 else
6463 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6464 * UNITS_PER_FP_WORD;
6465 }
6466 if (gpr_reg_num)
6467 {
6468 offset = -((first_reg_offset * reg_size) & ~7);
6469 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6470 {
6471 gpr_reg_num = cfun->va_list_gpr_size;
6472 if (reg_size == 4 && (first_reg_offset & 1))
6473 gpr_reg_num++;
6474 }
6475 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6476 }
6477 else if (fpr_size)
6478 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6479 * UNITS_PER_FP_WORD
6480 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6481
5b667039
JJ
6482 if (gpr_size + fpr_size)
6483 {
6484 rtx reg_save_area
6485 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6486 gcc_assert (GET_CODE (reg_save_area) == MEM);
6487 reg_save_area = XEXP (reg_save_area, 0);
6488 if (GET_CODE (reg_save_area) == PLUS)
6489 {
6490 gcc_assert (XEXP (reg_save_area, 0)
6491 == virtual_stack_vars_rtx);
6492 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6493 offset += INTVAL (XEXP (reg_save_area, 1));
6494 }
6495 else
6496 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6497 }
6498
6499 cfun->machine->varargs_save_offset = offset;
6500 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6501 }
4697a36c 6502 }
60e2d0ca 6503 else
4697a36c 6504 {
d34c5b80 6505 first_reg_offset = next_cum.words;
4cc833b7 6506 save_area = virtual_incoming_args_rtx;
4697a36c 6507
fe984136 6508 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6509 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6510 }
4697a36c 6511
dfafc897 6512 set = get_varargs_alias_set ();
9d30f3c1
JJ
6513 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6514 && cfun->va_list_gpr_size)
4cc833b7 6515 {
9d30f3c1
JJ
6516 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6517
6518 if (va_list_gpr_counter_field)
6519 {
6520 /* V4 va_list_gpr_size counts number of registers needed. */
6521 if (nregs > cfun->va_list_gpr_size)
6522 nregs = cfun->va_list_gpr_size;
6523 }
6524 else
6525 {
6526 /* char * va_list instead counts number of bytes needed. */
6527 if (nregs > cfun->va_list_gpr_size / reg_size)
6528 nregs = cfun->va_list_gpr_size / reg_size;
6529 }
6530
dfafc897 6531 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6532 plus_constant (save_area,
13e2e16e
DE
6533 first_reg_offset * reg_size));
6534 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6535 set_mem_alias_set (mem, set);
8ac61af7 6536 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6537
f676971a 6538 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6539 nregs);
4697a36c
MM
6540 }
6541
4697a36c 6542 /* Save FP registers if needed. */
f607bc57 6543 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6544 && TARGET_HARD_FLOAT && TARGET_FPRS
6545 && ! no_rtl
9d30f3c1
JJ
6546 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6547 && cfun->va_list_fpr_size)
4697a36c 6548 {
9d30f3c1 6549 int fregno = next_cum.fregno, nregs;
9ebbca7d 6550 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6551 rtx lab = gen_label_rtx ();
5b667039
JJ
6552 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6553 * UNITS_PER_FP_WORD);
4697a36c 6554
c4ad648e
AM
6555 emit_jump_insn
6556 (gen_rtx_SET (VOIDmode,
6557 pc_rtx,
6558 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6559 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6560 const0_rtx),
39403d82 6561 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6562 pc_rtx)));
6563
9d30f3c1
JJ
6564 for (nregs = 0;
6565 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6566 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6567 {
5496b36f 6568 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 6569 MEM_NOTRAP_P (mem) = 1;
c4ad648e 6570 set_mem_alias_set (mem, set);
94ff898d 6571 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 6572 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 6573 }
4cc833b7
RH
6574
6575 emit_label (lab);
4697a36c 6576 }
4697a36c 6577}
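/* Worked example (illustration only, not from the original source):
   for "int f (int a, ...)" compiled for 32-bit SVR4 with hard float,
   one named argument leaves 7 GPRs and all 8 FPRs to dump.  Assuming
   the va_list gpr/fpr counters do not limit the area, the code above
   computes roughly gpr_size = (7 * 4 + 7) & ~7 == 32 bytes and
   fpr_size = 8 * UNITS_PER_FP_WORD == 64 bytes, so about 96 bytes of
   reg_save_area are allocated in the frame and varargs_save_offset
   records where they sit relative to virtual_stack_vars_rtx.  */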
4697a36c 6578
dfafc897 6579/* Create the va_list data type. */
2c4974b7 6580
c35d187f
RH
6581static tree
6582rs6000_build_builtin_va_list (void)
dfafc897 6583{
64c2816f 6584 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6585
9ebbca7d
GK
6586 /* For AIX, prefer 'char *' because that's what the system
6587 header files like. */
f607bc57 6588 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6589 return build_pointer_type (char_type_node);
dfafc897 6590
f1e639b1 6591 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6592 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6593
f676971a 6594 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6595 unsigned_char_type_node);
f676971a 6596 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6597 unsigned_char_type_node);
64c2816f
DT
6598 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6599 every user file. */
6600 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6601 short_unsigned_type_node);
dfafc897
FS
6602 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6603 ptr_type_node);
6604 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6605 ptr_type_node);
6606
9d30f3c1
JJ
6607 va_list_gpr_counter_field = f_gpr;
6608 va_list_fpr_counter_field = f_fpr;
6609
dfafc897
FS
6610 DECL_FIELD_CONTEXT (f_gpr) = record;
6611 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6612 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6613 DECL_FIELD_CONTEXT (f_ovf) = record;
6614 DECL_FIELD_CONTEXT (f_sav) = record;
6615
bab45a51
FS
6616 TREE_CHAIN (record) = type_decl;
6617 TYPE_NAME (record) = type_decl;
dfafc897
FS
6618 TYPE_FIELDS (record) = f_gpr;
6619 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6620 TREE_CHAIN (f_fpr) = f_res;
6621 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6622 TREE_CHAIN (f_ovf) = f_sav;
6623
6624 layout_type (record);
6625
6626 /* The correct type is an array type of one element. */
6627 return build_array_type (record, build_index_type (size_zero_node));
6628}
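/* Illustration only (not from the original source): under the V.4 ABI
   the record built above corresponds roughly to

     typedef struct __va_list_tag {
       unsigned char gpr;            -- GP arg regs (r3..r10) used so far
       unsigned char fpr;            -- FP arg regs (f1..f8) used so far
       unsigned short reserved;      -- the named padding field
       void *overflow_arg_area;      -- args that spilled to the stack
       void *reg_save_area;          -- where the incoming arg regs were dumped
     } __va_list_tag;

   with va_list itself being a one-element array of this record, as
   returned by build_array_type above.  For the other ABIs va_list is
   simply "char *".  */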
6629
6630/* Implement va_start. */
6631
d7bd8aeb 6632static void
a2369ed3 6633rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6634{
dfafc897 6635 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6636 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6637 tree gpr, fpr, ovf, sav, t;
2c4974b7 6638
dfafc897 6639 /* Only SVR4 needs something special. */
f607bc57 6640 if (DEFAULT_ABI != ABI_V4)
dfafc897 6641 {
e5faf155 6642 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6643 return;
6644 }
6645
973a648b 6646 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6647 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6648 f_res = TREE_CHAIN (f_fpr);
6649 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6650 f_sav = TREE_CHAIN (f_ovf);
6651
872a65b5 6652 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6653 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6654 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6655 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6656 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
dfafc897
FS
6657
6658 /* Count number of gp and fp argument registers used. */
4cc833b7 6659 words = current_function_args_info.words;
987732e0
DE
6660 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
6661 GP_ARG_NUM_REG);
6662 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
6663 FP_ARG_NUM_REG);
dfafc897
FS
6664
6665 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6666 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6667 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6668 words, n_gpr, n_fpr);
dfafc897 6669
9d30f3c1
JJ
6670 if (cfun->va_list_gpr_size)
6671 {
07beea0d 6672 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
47a25a46 6673 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6674 TREE_SIDE_EFFECTS (t) = 1;
6675 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6676 }
58c8adc1 6677
9d30f3c1
JJ
6678 if (cfun->va_list_fpr_size)
6679 {
07beea0d 6680 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
47a25a46 6681 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6682 TREE_SIDE_EFFECTS (t) = 1;
6683 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6684 }
dfafc897
FS
6685
6686 /* Find the overflow area. */
6687 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6688 if (words != 0)
5be014d5
AP
6689 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6690 size_int (words * UNITS_PER_WORD));
07beea0d 6691 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6692 TREE_SIDE_EFFECTS (t) = 1;
6693 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6694
9d30f3c1
JJ
6695 /* If there were no va_arg invocations, don't set up the register
6696 save area. */
6697 if (!cfun->va_list_gpr_size
6698 && !cfun->va_list_fpr_size
6699 && n_gpr < GP_ARG_NUM_REG
6700 && n_fpr < FP_ARG_V4_MAX_REG)
6701 return;
6702
dfafc897
FS
6703 /* Find the register save area. */
6704 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6705 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6706 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6707 size_int (cfun->machine->varargs_save_offset));
07beea0d 6708 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
dfafc897
FS
6709 TREE_SIDE_EFFECTS (t) = 1;
6710 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6711}
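/* Illustration only (not from the original source): for
   "int f (int a, double d, ...)" compiled for 32-bit SVR4, the named
   arguments consume one GP register and one FP register, so va_start
   roughly does

     ap->gpr = 1;                                      -- n_gpr
     ap->fpr = 1;                                      -- n_fpr
     ap->overflow_arg_area = incoming args + words * UNITS_PER_WORD;
     ap->reg_save_area = frame + varargs_save_offset;

   The gpr/fpr stores are skipped when the function never uses the
   corresponding part of va_list.  */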
6712
6713/* Implement va_arg. */
6714
23a60a04
JM
6715tree
6716rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 6717{
cd3ce9b4
JM
6718 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6719 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6720 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6721 tree lab_false, lab_over, addr;
6722 int align;
6723 tree ptrtype = build_pointer_type (type);
7393f7f8 6724 int regalign = 0;
cd3ce9b4 6725
08b0dc1b
RH
6726 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6727 {
6728 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6729 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6730 }
6731
cd3ce9b4
JM
6732 if (DEFAULT_ABI != ABI_V4)
6733 {
08b0dc1b 6734 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6735 {
6736 tree elem_type = TREE_TYPE (type);
6737 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6738 int elem_size = GET_MODE_SIZE (elem_mode);
6739
6740 if (elem_size < UNITS_PER_WORD)
6741 {
23a60a04 6742 tree real_part, imag_part;
cd3ce9b4
JM
6743 tree post = NULL_TREE;
6744
23a60a04
JM
6745 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6746 &post);
6747 /* Copy the value into a temporary, lest the formal temporary
6748 be reused out from under us. */
6749 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
cd3ce9b4
JM
6750 append_to_statement_list (post, pre_p);
6751
23a60a04
JM
6752 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6753 post_p);
cd3ce9b4 6754
47a25a46 6755 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
6756 }
6757 }
6758
23a60a04 6759 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
6760 }
6761
6762 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6763 f_fpr = TREE_CHAIN (f_gpr);
6764 f_res = TREE_CHAIN (f_fpr);
6765 f_ovf = TREE_CHAIN (f_res);
6766 f_sav = TREE_CHAIN (f_ovf);
6767
872a65b5 6768 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6769 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6770 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6771 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6772 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
cd3ce9b4
JM
6773
6774 size = int_size_in_bytes (type);
6775 rsize = (size + 3) / 4;
6776 align = 1;
6777
08b0dc1b 6778 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3
JJ
6779 && (TYPE_MODE (type) == SFmode
6780 || TYPE_MODE (type) == DFmode
7393f7f8 6781 || TYPE_MODE (type) == TFmode
e41b2a33 6782 || TYPE_MODE (type) == SDmode
7393f7f8
BE
6783 || TYPE_MODE (type) == DDmode
6784 || TYPE_MODE (type) == TDmode))
cd3ce9b4
JM
6785 {
6786 /* FP args go in FP registers, if present. */
cd3ce9b4 6787 reg = fpr;
602ea4d3 6788 n_reg = (size + 7) / 8;
cd3ce9b4
JM
6789 sav_ofs = 8*4;
6790 sav_scale = 8;
e41b2a33 6791 if (TYPE_MODE (type) != SFmode && TYPE_MODE (type) != SDmode)
cd3ce9b4
JM
6792 align = 8;
6793 }
6794 else
6795 {
6796 /* Otherwise into GP registers. */
cd3ce9b4
JM
6797 reg = gpr;
6798 n_reg = rsize;
6799 sav_ofs = 0;
6800 sav_scale = 4;
6801 if (n_reg == 2)
6802 align = 8;
6803 }
6804
6805 /* Pull the value out of the saved registers.... */
6806
6807 lab_over = NULL;
6808 addr = create_tmp_var (ptr_type_node, "addr");
6809 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
6810
6811 /* AltiVec vectors never go in registers when -mabi=altivec. */
6812 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
6813 align = 16;
6814 else
6815 {
6816 lab_false = create_artificial_label ();
6817 lab_over = create_artificial_label ();
6818
6819 /* Long long and SPE vectors are aligned in the registers.
6820 As is any other 2-gpr item, such as complex int, due to a
6821 historical mistake. */
6822 u = reg;
602ea4d3 6823 if (n_reg == 2 && reg == gpr)
cd3ce9b4 6824 {
7393f7f8 6825 regalign = 1;
cd3ce9b4 6826 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
8fb632eb 6827 build_int_cst (TREE_TYPE (reg), n_reg - 1));
cd3ce9b4
JM
6828 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
6829 }
7393f7f8
BE
6830 /* _Decimal128 is passed in even/odd fpr pairs; the stored
6831 reg number is 0 for f1, so we want to make it odd. */
6832 else if (reg == fpr && TYPE_MODE (type) == TDmode)
6833 {
6834 regalign = 1;
383e91e4
JJ
6835 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), reg,
6836 build_int_cst (TREE_TYPE (reg), 1));
7393f7f8
BE
6837 u = build2 (MODIFY_EXPR, void_type_node, reg, t);
6838 }
cd3ce9b4 6839
95674810 6840 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
6841 t = build2 (GE_EXPR, boolean_type_node, u, t);
6842 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6843 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6844 gimplify_and_add (t, pre_p);
6845
6846 t = sav;
6847 if (sav_ofs)
5be014d5 6848 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 6849
8fb632eb
ZD
6850 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
6851 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
6852 u = fold_convert (sizetype, u);
6853 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
6854 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 6855
e41b2a33
PB
6856 /* _Decimal32 varargs are located in the second word of the 64-bit
6857 FP register for 32-bit binaries. */
6858 if (!TARGET_POWERPC64 && TYPE_MODE (type) == SDmode)
6859 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
6860
07beea0d 6861 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6862 gimplify_and_add (t, pre_p);
6863
6864 t = build1 (GOTO_EXPR, void_type_node, lab_over);
6865 gimplify_and_add (t, pre_p);
6866
6867 t = build1 (LABEL_EXPR, void_type_node, lab_false);
6868 append_to_statement_list (t, pre_p);
6869
7393f7f8 6870 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
6871 {
6872 /* Ensure that we don't find any more args in regs.
7393f7f8 6873 Alignment has been taken care of for the special cases. */
383e91e4
JJ
6874 t = build_gimple_modify_stmt (reg,
6875 build_int_cst (TREE_TYPE (reg), 8));
cd3ce9b4
JM
6876 gimplify_and_add (t, pre_p);
6877 }
6878 }
6879
6880 /* ... otherwise out of the overflow area. */
6881
6882 /* Care for on-stack alignment if needed. */
6883 t = ovf;
6884 if (align != 1)
6885 {
5be014d5
AP
6886 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
6887 t = fold_convert (sizetype, t);
4a90aeeb 6888 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
6889 size_int (-align));
6890 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
6891 }
6892 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6893
07beea0d 6894 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6895 gimplify_and_add (u, pre_p);
6896
5be014d5 6897 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
07beea0d 6898 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
cd3ce9b4
JM
6899 gimplify_and_add (t, pre_p);
6900
6901 if (lab_over)
6902 {
6903 t = build1 (LABEL_EXPR, void_type_node, lab_over);
6904 append_to_statement_list (t, pre_p);
6905 }
6906
0cfbc62b
JM
6907 if (STRICT_ALIGNMENT
6908 && (TYPE_ALIGN (type)
6909 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
6910 {
6911 /* The value (of type complex double, for example) may not be
6912 aligned in memory in the saved registers, so copy via a
6913 temporary. (This is the same code as used for SPARC.) */
6914 tree tmp = create_tmp_var (type, "va_arg_tmp");
6915 tree dest_addr = build_fold_addr_expr (tmp);
6916
5039610b
SL
6917 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
6918 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
6919
6920 gimplify_and_add (copy, pre_p);
6921 addr = dest_addr;
6922 }
6923
08b0dc1b 6924 addr = fold_convert (ptrtype, addr);
872a65b5 6925 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
6926}
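/* Illustration only (not from the original source): for va_arg of a
   "double" on 32-bit SVR4 with hard float, the code above uses
   reg = fpr, n_reg = 1, sav_ofs = 32 and sav_scale = 8, so the fast
   path computes roughly

     if (fpr >= 8) goto overflow;
     addr = reg_save_area + 32 + fpr * 8;
     fpr += 1;

   while the overflow path instead rounds overflow_arg_area up to the
   8-byte alignment of double and then bumps it by the argument size.  */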
6927
0ac081f6
AH
6928/* Builtins. */
6929
58646b77
PB
6930static void
6931def_builtin (int mask, const char *name, tree type, int code)
6932{
96038623 6933 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
58646b77
PB
6934 {
6935 if (rs6000_builtin_decls[code])
6936 abort ();
6937
6938 rs6000_builtin_decls[code] =
c79efc4d
RÁE
6939 add_builtin_function (name, type, code, BUILT_IN_MD,
6940 NULL, NULL_TREE);
58646b77
PB
6941 }
6942}
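/* Illustration only (not from the original source): the builtin
   initialization code elsewhere in this file walks the tables below
   and registers each entry through this helper, conceptually

     def_builtin (MASK_ALTIVEC, "__builtin_altivec_vaddfp",
                  v4sf_ftype_v4sf_v4sf, ALTIVEC_BUILTIN_VADDFP);

   where the type-node name is shown only for illustration.  The mask
   check above means the builtin is created only when the corresponding
   target flag (or paired float) is enabled.  */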
0ac081f6 6943
24408032
AH
6944/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
6945
2212663f 6946static const struct builtin_description bdesc_3arg[] =
24408032
AH
6947{
6948 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
6949 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
6950 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
6951 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
6952 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
6953 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
6954 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
6955 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
6956 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
6957 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 6958 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
6959 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
6960 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
6961 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
6962 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
6963 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
6964 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
6965 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
6966 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
6967 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
6968 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
6969 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
6970 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
6971
6972 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
6973 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
6974 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
6975 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
6976 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
6977 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
6978 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
6979 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
6980 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
6981 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
6982 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
6983 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
6984 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
6985 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
6986 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
96038623
DE
6987
6988 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
6989 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
6990 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
6991 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
6992 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
6993 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
6994 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
6995 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 6996 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 6997};
2212663f 6998
95385cbb
AH
6999/* DST operations: void foo (void *, const int, const char). */
7000
7001static const struct builtin_description bdesc_dst[] =
7002{
7003 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
7004 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
7005 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
7006 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
7007
7008 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
7009 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
7010 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
7011 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
7012};
7013
2212663f 7014/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 7015
a3170dc6 7016static struct builtin_description bdesc_2arg[] =
0ac081f6 7017{
f18c054f
DB
7018 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
7019 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
7020 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
7021 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
7022 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
7023 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
7024 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
7025 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
7026 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
7027 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
7028 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 7029 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 7030 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
7031 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
7032 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
7033 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
7034 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
7035 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
7036 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
7037 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
7038 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
7039 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
7040 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
7041 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
7042 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
7043 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
7044 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
7045 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
7046 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
7047 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
7048 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
7049 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
7050 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
7051 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
7052 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
7053 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
7054 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
7055 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
7056 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
7057 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
7058 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
7059 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
7060 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
7061 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
7062 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
7063 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
7064 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
7065 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
7066 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
7067 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
7068 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
7069 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
7070 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
7071 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
7072 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
7073 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
7074 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
7075 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
7076 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
7077 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
7078 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
7079 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
7080 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
7081 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 7082 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 7083 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
7084 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
7085 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
7086 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 7087 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
7088 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
7089 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
7090 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
7091 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
7092 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
7093 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
7094 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
7095 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
e83a75a7
IR
7096 { MASK_ALTIVEC, CODE_FOR_ashlv16qi3, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
7097 { MASK_ALTIVEC, CODE_FOR_ashlv8hi3, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
7098 { MASK_ALTIVEC, CODE_FOR_ashlv4si3, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
0ac081f6
AH
7099 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
7100 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
7101 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
7102 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
7103 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3e0de9d1
DP
7104 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
7105 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
7106 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
7107 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
7108 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
7109 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
7110 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
7111 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
7112 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
7113 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
7114 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
7115 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
7116 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
7117 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
7118 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
7119 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
7120 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
7121 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
7122 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
7123 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
7124 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
7125 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
7126 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
7127 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 7128 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 7129
58646b77
PB
7130 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
7131 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
7132 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
7133 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
7134 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
7135 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
7136 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
7137 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
7138 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
7139 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
7140 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
7141 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
7142 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
7143 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
7144 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
7145 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
7146 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
7147 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
7148 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
7149 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
7150 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
7151 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
7152 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
7153 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
7154 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7155 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7156 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7157 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7158 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7159 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7160 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7161 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7162 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7163 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7164 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7165 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7166 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7167 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7168 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7169 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7170 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7171 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7172 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7173 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7174 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7175 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7176 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7177 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7178 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7179 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7180 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7181 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7182 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7183 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7184 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7185 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7186 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7187 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7188 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7189 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7190 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7191 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7192 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7193 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7194 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7195 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7196 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7197 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7198 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7199 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7200 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7201 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7202 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7203 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7204 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7205 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7206 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7207 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7208 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7209 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7210 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7211 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7212 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7213 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7214 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7215 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7216 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7217 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7218 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7219 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7220 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7221 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7222 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7223 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7224 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7225 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7226 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7227 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7228 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7229 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7230 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7231 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7232 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7233 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7234 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7235 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7236 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7237 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7238 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7239 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7240 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7241 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7242 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7243 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7244 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7245 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7246 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7247 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7248 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7249 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7250 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7251 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7252 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7253 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7254 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7255 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7256 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7257
96038623
DE
7258 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7259 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7260 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7261 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7262 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7263 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7264 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7265 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7266 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7267 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7268
a3170dc6
AH
7269 /* Placeholder; leave as the first SPE builtin. */
7270 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7271 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7272 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7273 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7274 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7275 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7276 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7277 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7278 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7279 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7280 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7281 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7282 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7283 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7284 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7285 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7286 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7287 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7288 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7289 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7290 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7291 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7292 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7293 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7294 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7295 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7296 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7297 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7298 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7299 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7300 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7301 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7302 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7303 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7304 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7305 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7306 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7307 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7308 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7309 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7310 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7311 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7312 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7313 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7314 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7315 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7316 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7317 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7318 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7319 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7320 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7321 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7322 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7323 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7324 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7325 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7326 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7327 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7328 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7329 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7330 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7331 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7332 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7333 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7334 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7335 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7336 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7337 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7338 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7339 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7340 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7341 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7342 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7343 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
7344 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7345 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
7346 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7347 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7348 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7349 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7350 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7351 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7352 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7353 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7354 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7355 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7356 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7357 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7358 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7359 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7360 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7361 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7362 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7363 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7364 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7365 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7366 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7367 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7368 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7369 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7370 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7371 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7372 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7373 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7374 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7375 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7376 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7377 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7378 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7379
7380 /* SPE binary operations expecting a 5-bit unsigned literal. */
7381 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7382
7383 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7384 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7385 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7386 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7387 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7388 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7389 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7390 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7391 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7392 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7393 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7394 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7395 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7396 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7397 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7398 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7399 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7400 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7401 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7402 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7403 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7404 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7405 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7406 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7407 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7408 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7409
7410 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7411 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
7412};
7413
7414/* AltiVec predicates. */
7415
7416struct builtin_description_predicates
7417{
7418 const unsigned int mask;
7419 const enum insn_code icode;
7420 const char *opcode;
7421 const char *const name;
7422 const enum rs6000_builtins code;
7423};
7424
7425static const struct builtin_description_predicates bdesc_altivec_preds[] =
7426{
7427 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7428 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7429 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7430 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7431 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7432 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7433 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7434 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7435 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7436 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7437 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7438 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
7439 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7440
7441 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7442 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7443 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7444};
24408032 7445
a3170dc6
AH
7446/* SPE predicates. */
7447static struct builtin_description bdesc_spe_predicates[] =
7448{
7449 /* Place-holder. Leave as first. */
7450 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7451 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7452 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7453 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7454 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7455 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7456 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7457 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7458 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7459 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7460 /* Place-holder. Leave as last. */
7461 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7462};
7463
7464/* SPE evsel predicates. */
7465static struct builtin_description bdesc_spe_evsel[] =
7466{
7467 /* Place-holder. Leave as first. */
7468 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7469 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7470 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7471 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7472 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7473 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7474 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7475 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7476 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7477 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7478 /* Place-holder. Leave as last. */
7479 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7480};
7481
96038623
DE
7482/* PAIRED predicates. */
7483static const struct builtin_description bdesc_paired_preds[] =
7484{
7485 /* Place-holder. Leave as first. */
7486 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7487 /* Place-holder. Leave as last. */
7488 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7489};
7490
b6d08ca1 7491/* ABS* operations. */
100c4561
AH
7492
7493static const struct builtin_description bdesc_abs[] =
7494{
7495 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7496 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7497 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7498 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7499 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7500 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7501 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7502};
7503
617e0e1d
DB
7504/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7505 foo (VECa). */
24408032 7506
a3170dc6 7507static struct builtin_description bdesc_1arg[] =
2212663f 7508{
617e0e1d
DB
7509 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7510 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7511 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7512 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7513 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7514 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7515 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7516 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7517 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7518 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7519 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7520 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7521 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7522 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7523 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7524 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7525 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7526
58646b77
PB
7527 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7528 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7529 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7530 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7531 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7532 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7533 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7534 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7535 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7536 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7537 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7538 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7539 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7540 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7541 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7542 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7543 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7544 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7545 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7546
a3170dc6
AH
7547 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7548 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7549 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7550 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7551 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7552 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7553 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7554 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7555 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7556 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7557 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7558 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7559 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7560 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7561 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7562 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7563 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7564 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7565 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7566 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7567 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7568 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7569 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7570 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7571 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7572 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7573 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7574 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7575 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7576 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7577
7578 /* Place-holder. Leave as last unary SPE builtin. */
96038623
DE
7579 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7580
7581 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7582 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7583 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7584 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7585 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
2212663f
DB
7586};
7587
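/* Expand a call EXP to a builtin taking one operand, using insn pattern
   ICODE; for the vspltis* and evsplat* patterns the operand must be a
   5-bit signed literal.  The result is returned in TARGET (or a fresh
   register if TARGET is not suitable).  */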
7588static rtx
5039610b 7589rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7590{
7591 rtx pat;
5039610b 7592 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7593 rtx op0 = expand_normal (arg0);
2212663f
DB
7594 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7595 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7596
0559cc77
DE
7597 if (icode == CODE_FOR_nothing)
7598 /* Builtin not supported on this processor. */
7599 return 0;
7600
20e26713
AH
 7601 /* If we got invalid arguments, bail out before generating bad rtl. */
7602 if (arg0 == error_mark_node)
9a171fcd 7603 return const0_rtx;
20e26713 7604
0559cc77
DE
7605 if (icode == CODE_FOR_altivec_vspltisb
7606 || icode == CODE_FOR_altivec_vspltish
7607 || icode == CODE_FOR_altivec_vspltisw
7608 || icode == CODE_FOR_spe_evsplatfi
7609 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7610 {
7611 /* Only allow 5-bit *signed* literals. */
b44140e7 7612 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7613 || INTVAL (op0) > 15
7614 || INTVAL (op0) < -16)
b44140e7
AH
7615 {
7616 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7617 return const0_rtx;
b44140e7 7618 }
b44140e7
AH
7619 }
7620
c62f2db5 7621 if (target == 0
2212663f
DB
7622 || GET_MODE (target) != tmode
7623 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7624 target = gen_reg_rtx (tmode);
7625
7626 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7627 op0 = copy_to_mode_reg (mode0, op0);
7628
7629 pat = GEN_FCN (icode) (target, op0);
7630 if (! pat)
7631 return 0;
7632 emit_insn (pat);
0ac081f6 7633
2212663f
DB
7634 return target;
7635}
ae4b4a02 7636
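/* Expand one of the AltiVec ABS builtins from bdesc_abs: insn pattern
   ICODE takes the operand of EXP plus two scratch registers, and the
   result is returned in TARGET.  */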
100c4561 7637static rtx
5039610b 7638altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7639{
7640 rtx pat, scratch1, scratch2;
5039610b 7641 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7642 rtx op0 = expand_normal (arg0);
100c4561
AH
7643 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7644 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7645
7646 /* If we have invalid arguments, bail out before generating bad rtl. */
7647 if (arg0 == error_mark_node)
9a171fcd 7648 return const0_rtx;
100c4561
AH
7649
7650 if (target == 0
7651 || GET_MODE (target) != tmode
7652 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7653 target = gen_reg_rtx (tmode);
7654
7655 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7656 op0 = copy_to_mode_reg (mode0, op0);
7657
7658 scratch1 = gen_reg_rtx (mode0);
7659 scratch2 = gen_reg_rtx (mode0);
7660
7661 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7662 if (! pat)
7663 return 0;
7664 emit_insn (pat);
7665
7666 return target;
7667}
7668
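/* Expand a call EXP to a builtin taking two operands, using insn pattern
   ICODE; for certain patterns the second argument must be a 5-bit
   unsigned literal.  The result is returned in TARGET.  */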
0ac081f6 7669static rtx
5039610b 7670rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7671{
7672 rtx pat;
5039610b
SL
7673 tree arg0 = CALL_EXPR_ARG (exp, 0);
7674 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7675 rtx op0 = expand_normal (arg0);
7676 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7677 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7678 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7679 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7680
0559cc77
DE
7681 if (icode == CODE_FOR_nothing)
7682 /* Builtin not supported on this processor. */
7683 return 0;
7684
20e26713
AH
 7685 /* If we got invalid arguments, bail out before generating bad rtl. */
7686 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7687 return const0_rtx;
20e26713 7688
0559cc77
DE
7689 if (icode == CODE_FOR_altivec_vcfux
7690 || icode == CODE_FOR_altivec_vcfsx
7691 || icode == CODE_FOR_altivec_vctsxs
7692 || icode == CODE_FOR_altivec_vctuxs
7693 || icode == CODE_FOR_altivec_vspltb
7694 || icode == CODE_FOR_altivec_vsplth
7695 || icode == CODE_FOR_altivec_vspltw
7696 || icode == CODE_FOR_spe_evaddiw
7697 || icode == CODE_FOR_spe_evldd
7698 || icode == CODE_FOR_spe_evldh
7699 || icode == CODE_FOR_spe_evldw
7700 || icode == CODE_FOR_spe_evlhhesplat
7701 || icode == CODE_FOR_spe_evlhhossplat
7702 || icode == CODE_FOR_spe_evlhhousplat
7703 || icode == CODE_FOR_spe_evlwhe
7704 || icode == CODE_FOR_spe_evlwhos
7705 || icode == CODE_FOR_spe_evlwhou
7706 || icode == CODE_FOR_spe_evlwhsplat
7707 || icode == CODE_FOR_spe_evlwwsplat
7708 || icode == CODE_FOR_spe_evrlwi
7709 || icode == CODE_FOR_spe_evslwi
7710 || icode == CODE_FOR_spe_evsrwis
f5119d10 7711 || icode == CODE_FOR_spe_evsubifw
0559cc77 7712 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7713 {
7714 /* Only allow 5-bit unsigned literals. */
8bb418a3 7715 STRIP_NOPS (arg1);
b44140e7
AH
7716 if (TREE_CODE (arg1) != INTEGER_CST
7717 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7718 {
7719 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7720 return const0_rtx;
b44140e7 7721 }
b44140e7
AH
7722 }
7723
c62f2db5 7724 if (target == 0
0ac081f6
AH
7725 || GET_MODE (target) != tmode
7726 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7727 target = gen_reg_rtx (tmode);
7728
7729 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7730 op0 = copy_to_mode_reg (mode0, op0);
7731 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7732 op1 = copy_to_mode_reg (mode1, op1);
7733
7734 pat = GEN_FCN (icode) (target, op0, op1);
7735 if (! pat)
7736 return 0;
7737 emit_insn (pat);
7738
7739 return target;
7740}
6525c0e7 7741
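/* Expand an AltiVec predicate builtin.  OPCODE names the comparison
   instruction to use; the first argument of EXP selects which bit of CR6
   is tested and copied into TARGET.  */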
ae4b4a02 7742static rtx
f676971a 7743altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7744 tree exp, rtx target)
ae4b4a02
AH
7745{
7746 rtx pat, scratch;
5039610b
SL
7747 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7748 tree arg0 = CALL_EXPR_ARG (exp, 1);
7749 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7750 rtx op0 = expand_normal (arg0);
7751 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
7752 enum machine_mode tmode = SImode;
7753 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7754 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7755 int cr6_form_int;
7756
7757 if (TREE_CODE (cr6_form) != INTEGER_CST)
7758 {
7759 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 7760 return const0_rtx;
ae4b4a02
AH
7761 }
7762 else
7763 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
7764
37409796 7765 gcc_assert (mode0 == mode1);
ae4b4a02
AH
7766
7767 /* If we have invalid arguments, bail out before generating bad rtl. */
7768 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7769 return const0_rtx;
ae4b4a02
AH
7770
7771 if (target == 0
7772 || GET_MODE (target) != tmode
7773 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7774 target = gen_reg_rtx (tmode);
7775
7776 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7777 op0 = copy_to_mode_reg (mode0, op0);
7778 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7779 op1 = copy_to_mode_reg (mode1, op1);
7780
7781 scratch = gen_reg_rtx (mode0);
7782
7783 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 7784 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
7785 if (! pat)
7786 return 0;
7787 emit_insn (pat);
7788
7789 /* The vec_any* and vec_all* predicates use the same opcodes for two
7790 different operations, but the bits in CR6 will be different
7791 depending on what information we want. So we have to play tricks
7792 with CR6 to get the right bits out.
7793
7794 If you think this is disgusting, look at the specs for the
7795 AltiVec predicates. */
7796
c4ad648e
AM
7797 switch (cr6_form_int)
7798 {
7799 case 0:
7800 emit_insn (gen_cr6_test_for_zero (target));
7801 break;
7802 case 1:
7803 emit_insn (gen_cr6_test_for_zero_reverse (target));
7804 break;
7805 case 2:
7806 emit_insn (gen_cr6_test_for_lt (target));
7807 break;
7808 case 3:
7809 emit_insn (gen_cr6_test_for_lt_reverse (target));
7810 break;
7811 default:
7812 error ("argument 1 of __builtin_altivec_predicate is out of range");
7813 break;
7814 }
ae4b4a02
AH
7815
7816 return target;
7817}
7818
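/* Expand a PAIRED load builtin: the address is formed by adding the two
   arguments of EXP (the first may be a constant zero offset), and the
   loaded value is returned in TARGET.  */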
96038623
DE
7819static rtx
7820paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
7821{
7822 rtx pat, addr;
7823 tree arg0 = CALL_EXPR_ARG (exp, 0);
7824 tree arg1 = CALL_EXPR_ARG (exp, 1);
7825 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7826 enum machine_mode mode0 = Pmode;
7827 enum machine_mode mode1 = Pmode;
7828 rtx op0 = expand_normal (arg0);
7829 rtx op1 = expand_normal (arg1);
7830
7831 if (icode == CODE_FOR_nothing)
7832 /* Builtin not supported on this processor. */
7833 return 0;
7834
 7835 /* If we got invalid arguments, bail out before generating bad rtl. */
7836 if (arg0 == error_mark_node || arg1 == error_mark_node)
7837 return const0_rtx;
7838
7839 if (target == 0
7840 || GET_MODE (target) != tmode
7841 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7842 target = gen_reg_rtx (tmode);
7843
7844 op1 = copy_to_mode_reg (mode1, op1);
7845
7846 if (op0 == const0_rtx)
7847 {
7848 addr = gen_rtx_MEM (tmode, op1);
7849 }
7850 else
7851 {
7852 op0 = copy_to_mode_reg (mode0, op0);
7853 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7854 }
7855
7856 pat = GEN_FCN (icode) (target, addr);
7857
7858 if (! pat)
7859 return 0;
7860 emit_insn (pat);
7861
7862 return target;
7863}
7864
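/* Expand an AltiVec builtin that takes an (offset, base) address pair
   (lvx, lvxl, lve*x, lvsl, lvsr): the address is formed by adding the two
   arguments of EXP, and the result is returned in TARGET.  */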
b4a62fa0 7865static rtx
5039610b 7866altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
b4a62fa0
SB
7867{
7868 rtx pat, addr;
5039610b
SL
7869 tree arg0 = CALL_EXPR_ARG (exp, 0);
7870 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
7871 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7872 enum machine_mode mode0 = Pmode;
7873 enum machine_mode mode1 = Pmode;
84217346
MD
7874 rtx op0 = expand_normal (arg0);
7875 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
7876
7877 if (icode == CODE_FOR_nothing)
7878 /* Builtin not supported on this processor. */
7879 return 0;
7880
 7881 /* If we got invalid arguments, bail out before generating bad rtl. */
7882 if (arg0 == error_mark_node || arg1 == error_mark_node)
7883 return const0_rtx;
7884
7885 if (target == 0
7886 || GET_MODE (target) != tmode
7887 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7888 target = gen_reg_rtx (tmode);
7889
f676971a 7890 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
7891
7892 if (op0 == const0_rtx)
7893 {
7894 addr = gen_rtx_MEM (tmode, op1);
7895 }
7896 else
7897 {
7898 op0 = copy_to_mode_reg (mode0, op0);
7899 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7900 }
7901
7902 pat = GEN_FCN (icode) (target, addr);
7903
7904 if (! pat)
7905 return 0;
7906 emit_insn (pat);
7907
7908 return target;
7909}
7910
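/* Expand an SPE store builtin: the three arguments of EXP are passed to
   insn pattern ICODE, and NULL_RTX is returned since the builtin produces
   no value.  */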
61bea3b0 7911static rtx
5039610b 7912spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 7913{
5039610b
SL
7914 tree arg0 = CALL_EXPR_ARG (exp, 0);
7915 tree arg1 = CALL_EXPR_ARG (exp, 1);
7916 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7917 rtx op0 = expand_normal (arg0);
7918 rtx op1 = expand_normal (arg1);
7919 rtx op2 = expand_normal (arg2);
61bea3b0
AH
7920 rtx pat;
7921 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
7922 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
7923 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
7924
 7925 /* Invalid arguments. Bail before doing anything stupid! */
7926 if (arg0 == error_mark_node
7927 || arg1 == error_mark_node
7928 || arg2 == error_mark_node)
7929 return const0_rtx;
7930
7931 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
7932 op0 = copy_to_mode_reg (mode2, op0);
7933 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7934 op1 = copy_to_mode_reg (mode0, op1);
7935 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7936 op2 = copy_to_mode_reg (mode1, op2);
7937
7938 pat = GEN_FCN (icode) (op1, op2, op0);
7939 if (pat)
7940 emit_insn (pat);
7941 return NULL_RTX;
7942}
7943
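/* Expand a PAIRED store builtin: the first argument of EXP is the value to
   store and the remaining two arguments are added to form the destination
   address.  NULL_RTX is returned since no value is produced.  */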
96038623
DE
7944static rtx
7945paired_expand_stv_builtin (enum insn_code icode, tree exp)
7946{
7947 tree arg0 = CALL_EXPR_ARG (exp, 0);
7948 tree arg1 = CALL_EXPR_ARG (exp, 1);
7949 tree arg2 = CALL_EXPR_ARG (exp, 2);
7950 rtx op0 = expand_normal (arg0);
7951 rtx op1 = expand_normal (arg1);
7952 rtx op2 = expand_normal (arg2);
7953 rtx pat, addr;
7954 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7955 enum machine_mode mode1 = Pmode;
7956 enum machine_mode mode2 = Pmode;
7957
 7958 /* Invalid arguments. Bail before doing anything stupid! */
7959 if (arg0 == error_mark_node
7960 || arg1 == error_mark_node
7961 || arg2 == error_mark_node)
7962 return const0_rtx;
7963
7964 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7965 op0 = copy_to_mode_reg (tmode, op0);
7966
7967 op2 = copy_to_mode_reg (mode2, op2);
7968
7969 if (op1 == const0_rtx)
7970 {
7971 addr = gen_rtx_MEM (tmode, op2);
7972 }
7973 else
7974 {
7975 op1 = copy_to_mode_reg (mode1, op1);
7976 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7977 }
7978
7979 pat = GEN_FCN (icode) (addr, op0);
7980 if (pat)
7981 emit_insn (pat);
7982 return NULL_RTX;
7983}
7984
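/* Expand an AltiVec store builtin (stvx and friends): the first argument
   of EXP is the value to store and the remaining two arguments are added
   to form the destination address.  NULL_RTX is returned.  */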
6525c0e7 7985static rtx
5039610b 7986altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 7987{
5039610b
SL
7988 tree arg0 = CALL_EXPR_ARG (exp, 0);
7989 tree arg1 = CALL_EXPR_ARG (exp, 1);
7990 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7991 rtx op0 = expand_normal (arg0);
7992 rtx op1 = expand_normal (arg1);
7993 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
7994 rtx pat, addr;
7995 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7996 enum machine_mode mode1 = Pmode;
7997 enum machine_mode mode2 = Pmode;
6525c0e7
AH
7998
 7999 /* Invalid arguments. Bail before doing anything stupid! */
8000 if (arg0 == error_mark_node
8001 || arg1 == error_mark_node
8002 || arg2 == error_mark_node)
9a171fcd 8003 return const0_rtx;
6525c0e7 8004
b4a62fa0
SB
8005 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8006 op0 = copy_to_mode_reg (tmode, op0);
8007
f676971a 8008 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
8009
8010 if (op1 == const0_rtx)
8011 {
8012 addr = gen_rtx_MEM (tmode, op2);
8013 }
8014 else
8015 {
8016 op1 = copy_to_mode_reg (mode1, op1);
8017 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8018 }
6525c0e7 8019
b4a62fa0 8020 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
8021 if (pat)
8022 emit_insn (pat);
8023 return NULL_RTX;
8024}
8025
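/* Expand a call EXP to a builtin taking three operands, using insn pattern
   ICODE; for the vsldoi patterns the third argument must be a 4-bit
   unsigned literal.  The result is returned in TARGET.  */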
2212663f 8026static rtx
5039610b 8027rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
8028{
8029 rtx pat;
5039610b
SL
8030 tree arg0 = CALL_EXPR_ARG (exp, 0);
8031 tree arg1 = CALL_EXPR_ARG (exp, 1);
8032 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8033 rtx op0 = expand_normal (arg0);
8034 rtx op1 = expand_normal (arg1);
8035 rtx op2 = expand_normal (arg2);
2212663f
DB
8036 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8037 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8038 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8039 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 8040
774b5662
DE
8041 if (icode == CODE_FOR_nothing)
8042 /* Builtin not supported on this processor. */
8043 return 0;
8044
20e26713
AH
 8045 /* If we got invalid arguments, bail out before generating bad rtl. */
8046 if (arg0 == error_mark_node
8047 || arg1 == error_mark_node
8048 || arg2 == error_mark_node)
9a171fcd 8049 return const0_rtx;
20e26713 8050
aba5fb01
NS
8051 if (icode == CODE_FOR_altivec_vsldoi_v4sf
8052 || icode == CODE_FOR_altivec_vsldoi_v4si
8053 || icode == CODE_FOR_altivec_vsldoi_v8hi
8054 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
8055 {
8056 /* Only allow 4-bit unsigned literals. */
8bb418a3 8057 STRIP_NOPS (arg2);
b44140e7
AH
8058 if (TREE_CODE (arg2) != INTEGER_CST
8059 || TREE_INT_CST_LOW (arg2) & ~0xf)
8060 {
8061 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 8062 return const0_rtx;
b44140e7 8063 }
b44140e7
AH
8064 }
8065
c62f2db5 8066 if (target == 0
2212663f
DB
8067 || GET_MODE (target) != tmode
8068 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8069 target = gen_reg_rtx (tmode);
8070
8071 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8072 op0 = copy_to_mode_reg (mode0, op0);
8073 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8074 op1 = copy_to_mode_reg (mode1, op1);
8075 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
8076 op2 = copy_to_mode_reg (mode2, op2);
8077
49e39588
RE
8078 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
8079 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
8080 else
8081 pat = GEN_FCN (icode) (target, op0, op1, op2);
2212663f
DB
8082 if (! pat)
8083 return 0;
8084 emit_insn (pat);
8085
8086 return target;
8087}
92898235 8088
3a9b8c7e 8089/* Expand the lvx builtins. */
0ac081f6 8090static rtx
a2369ed3 8091altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 8092{
5039610b 8093 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 8094 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
8095 tree arg0;
8096 enum machine_mode tmode, mode0;
7c3abc73 8097 rtx pat, op0;
3a9b8c7e 8098 enum insn_code icode;
92898235 8099
0ac081f6
AH
8100 switch (fcode)
8101 {
f18c054f 8102 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 8103 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 8104 break;
f18c054f 8105 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 8106 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
8107 break;
8108 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 8109 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
8110 break;
8111 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 8112 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
8113 break;
8114 default:
8115 *expandedp = false;
8116 return NULL_RTX;
8117 }
0ac081f6 8118
3a9b8c7e 8119 *expandedp = true;
f18c054f 8120
5039610b 8121 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8122 op0 = expand_normal (arg0);
3a9b8c7e
AH
8123 tmode = insn_data[icode].operand[0].mode;
8124 mode0 = insn_data[icode].operand[1].mode;
f18c054f 8125
3a9b8c7e
AH
8126 if (target == 0
8127 || GET_MODE (target) != tmode
8128 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8129 target = gen_reg_rtx (tmode);
24408032 8130
3a9b8c7e
AH
8131 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8132 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 8133
3a9b8c7e
AH
8134 pat = GEN_FCN (icode) (target, op0);
8135 if (! pat)
8136 return 0;
8137 emit_insn (pat);
8138 return target;
8139}
f18c054f 8140
3a9b8c7e
AH
8141/* Expand the stvx builtins. */
8142static rtx
f676971a 8143altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8144 bool *expandedp)
3a9b8c7e 8145{
5039610b 8146 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8147 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8148 tree arg0, arg1;
8149 enum machine_mode mode0, mode1;
7c3abc73 8150 rtx pat, op0, op1;
3a9b8c7e 8151 enum insn_code icode;
f18c054f 8152
3a9b8c7e
AH
8153 switch (fcode)
8154 {
8155 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 8156 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
8157 break;
8158 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 8159 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
8160 break;
8161 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 8162 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
8163 break;
8164 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 8165 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
8166 break;
8167 default:
8168 *expandedp = false;
8169 return NULL_RTX;
8170 }
24408032 8171
5039610b
SL
8172 arg0 = CALL_EXPR_ARG (exp, 0);
8173 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
8174 op0 = expand_normal (arg0);
8175 op1 = expand_normal (arg1);
3a9b8c7e
AH
8176 mode0 = insn_data[icode].operand[0].mode;
8177 mode1 = insn_data[icode].operand[1].mode;
f18c054f 8178
3a9b8c7e
AH
8179 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8180 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8181 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8182 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 8183
3a9b8c7e
AH
8184 pat = GEN_FCN (icode) (op0, op1);
8185 if (pat)
8186 emit_insn (pat);
f18c054f 8187
3a9b8c7e
AH
8188 *expandedp = true;
8189 return NULL_RTX;
8190}
f18c054f 8191
3a9b8c7e
AH
8192/* Expand the dst builtins. */
8193static rtx
f676971a 8194altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8195 bool *expandedp)
3a9b8c7e 8196{
5039610b 8197 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8198 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8199 tree arg0, arg1, arg2;
8200 enum machine_mode mode0, mode1, mode2;
7c3abc73 8201 rtx pat, op0, op1, op2;
586de218 8202 const struct builtin_description *d;
a3170dc6 8203 size_t i;
f18c054f 8204
3a9b8c7e 8205 *expandedp = false;
f18c054f 8206
3a9b8c7e 8207 /* Handle DST variants. */
586de218 8208 d = bdesc_dst;
3a9b8c7e
AH
8209 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8210 if (d->code == fcode)
8211 {
5039610b
SL
8212 arg0 = CALL_EXPR_ARG (exp, 0);
8213 arg1 = CALL_EXPR_ARG (exp, 1);
8214 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8215 op0 = expand_normal (arg0);
8216 op1 = expand_normal (arg1);
8217 op2 = expand_normal (arg2);
3a9b8c7e
AH
8218 mode0 = insn_data[d->icode].operand[0].mode;
8219 mode1 = insn_data[d->icode].operand[1].mode;
8220 mode2 = insn_data[d->icode].operand[2].mode;
24408032 8221
3a9b8c7e
AH
8222 /* Invalid arguments, bail out before generating bad rtl. */
8223 if (arg0 == error_mark_node
8224 || arg1 == error_mark_node
8225 || arg2 == error_mark_node)
8226 return const0_rtx;
f18c054f 8227
86e7df90 8228 *expandedp = true;
8bb418a3 8229 STRIP_NOPS (arg2);
3a9b8c7e
AH
8230 if (TREE_CODE (arg2) != INTEGER_CST
8231 || TREE_INT_CST_LOW (arg2) & ~0x3)
8232 {
9e637a26 8233 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
8234 return const0_rtx;
8235 }
f18c054f 8236
3a9b8c7e 8237 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 8238 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
8239 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8240 op1 = copy_to_mode_reg (mode1, op1);
24408032 8241
3a9b8c7e
AH
8242 pat = GEN_FCN (d->icode) (op0, op1, op2);
8243 if (pat != 0)
8244 emit_insn (pat);
f18c054f 8245
3a9b8c7e
AH
8246 return NULL_RTX;
8247 }
f18c054f 8248
3a9b8c7e
AH
8249 return NULL_RTX;
8250}
24408032 8251
7a4eca66
DE
8252/* Expand vec_init builtin. */
8253static rtx
5039610b 8254altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
8255{
8256 enum machine_mode tmode = TYPE_MODE (type);
8257 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8258 int i, n_elt = GET_MODE_NUNITS (tmode);
8259 rtvec v = rtvec_alloc (n_elt);
8260
8261 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 8262 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8263
5039610b 8264 for (i = 0; i < n_elt; ++i)
7a4eca66 8265 {
5039610b 8266 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
8267 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8268 }
8269
7a4eca66
DE
8270 if (!target || !register_operand (target, tmode))
8271 target = gen_reg_rtx (tmode);
8272
8273 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8274 return target;
8275}
8276
8277/* Return the integer constant in ARG. Constrain it to be in the range
8278 of the subparts of VEC_TYPE; issue an error if not. */
8279
8280static int
8281get_element_number (tree vec_type, tree arg)
8282{
8283 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8284
8285 if (!host_integerp (arg, 1)
8286 || (elt = tree_low_cst (arg, 1), elt > max))
8287 {
8288 error ("selector must be an integer constant in the range 0..%wi", max);
8289 return 0;
8290 }
8291
8292 return elt;
8293}
8294
8295/* Expand vec_set builtin. */
8296static rtx
5039610b 8297altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
8298{
8299 enum machine_mode tmode, mode1;
8300 tree arg0, arg1, arg2;
8301 int elt;
8302 rtx op0, op1;
8303
5039610b
SL
8304 arg0 = CALL_EXPR_ARG (exp, 0);
8305 arg1 = CALL_EXPR_ARG (exp, 1);
8306 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
8307
8308 tmode = TYPE_MODE (TREE_TYPE (arg0));
8309 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8310 gcc_assert (VECTOR_MODE_P (tmode));
8311
8312 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
8313 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
8314 elt = get_element_number (TREE_TYPE (arg0), arg2);
8315
8316 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8317 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8318
8319 op0 = force_reg (tmode, op0);
8320 op1 = force_reg (mode1, op1);
8321
8322 rs6000_expand_vector_set (op0, op1, elt);
8323
8324 return op0;
8325}
8326
8327/* Expand vec_ext builtin. */
8328static rtx
5039610b 8329altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
8330{
8331 enum machine_mode tmode, mode0;
8332 tree arg0, arg1;
8333 int elt;
8334 rtx op0;
8335
5039610b
SL
8336 arg0 = CALL_EXPR_ARG (exp, 0);
8337 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8338
84217346 8339 op0 = expand_normal (arg0);
7a4eca66
DE
8340 elt = get_element_number (TREE_TYPE (arg0), arg1);
8341
8342 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8343 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8344 gcc_assert (VECTOR_MODE_P (mode0));
8345
8346 op0 = force_reg (mode0, op0);
8347
8348 if (optimize || !target || !register_operand (target, tmode))
8349 target = gen_reg_rtx (tmode);
8350
8351 rs6000_expand_vector_extract (target, op0, elt);
8352
8353 return target;
8354}
8355
3a9b8c7e
AH
8356/* Expand the builtin in EXP and store the result in TARGET. Store
8357 true in *EXPANDEDP if we found a builtin to expand. */
8358static rtx
a2369ed3 8359altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8360{
586de218
KG
8361 const struct builtin_description *d;
8362 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
8363 size_t i;
8364 enum insn_code icode;
5039610b 8365 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
8366 tree arg0;
8367 rtx op0, pat;
8368 enum machine_mode tmode, mode0;
3a9b8c7e 8369 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8370
58646b77
PB
8371 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8372 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8373 {
8374 *expandedp = true;
ea40ba9c 8375 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
8376 return const0_rtx;
8377 }
8378
3a9b8c7e
AH
8379 target = altivec_expand_ld_builtin (exp, target, expandedp);
8380 if (*expandedp)
8381 return target;
0ac081f6 8382
3a9b8c7e
AH
8383 target = altivec_expand_st_builtin (exp, target, expandedp);
8384 if (*expandedp)
8385 return target;
8386
8387 target = altivec_expand_dst_builtin (exp, target, expandedp);
8388 if (*expandedp)
8389 return target;
8390
8391 *expandedp = true;
95385cbb 8392
3a9b8c7e
AH
8393 switch (fcode)
8394 {
6525c0e7 8395 case ALTIVEC_BUILTIN_STVX:
5039610b 8396 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8397 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8398 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8399 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8400 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8401 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8402 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8403 case ALTIVEC_BUILTIN_STVXL:
5039610b 8404 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8405
95385cbb
AH
8406 case ALTIVEC_BUILTIN_MFVSCR:
8407 icode = CODE_FOR_altivec_mfvscr;
8408 tmode = insn_data[icode].operand[0].mode;
8409
8410 if (target == 0
8411 || GET_MODE (target) != tmode
8412 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8413 target = gen_reg_rtx (tmode);
f676971a 8414
95385cbb 8415 pat = GEN_FCN (icode) (target);
0ac081f6
AH
8416 if (! pat)
8417 return 0;
8418 emit_insn (pat);
95385cbb
AH
8419 return target;
8420
8421 case ALTIVEC_BUILTIN_MTVSCR:
8422 icode = CODE_FOR_altivec_mtvscr;
5039610b 8423 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8424 op0 = expand_normal (arg0);
95385cbb
AH
8425 mode0 = insn_data[icode].operand[0].mode;
8426
 8428 /* If we got invalid arguments, bail out before generating bad rtl. */
8428 if (arg0 == error_mark_node)
9a171fcd 8429 return const0_rtx;
95385cbb
AH
8430
8431 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8432 op0 = copy_to_mode_reg (mode0, op0);
8433
8434 pat = GEN_FCN (icode) (op0);
8435 if (pat)
8436 emit_insn (pat);
8437 return NULL_RTX;
3a9b8c7e 8438
95385cbb
AH
8439 case ALTIVEC_BUILTIN_DSSALL:
8440 emit_insn (gen_altivec_dssall ());
8441 return NULL_RTX;
8442
8443 case ALTIVEC_BUILTIN_DSS:
8444 icode = CODE_FOR_altivec_dss;
5039610b 8445 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8446 STRIP_NOPS (arg0);
84217346 8447 op0 = expand_normal (arg0);
95385cbb
AH
8448 mode0 = insn_data[icode].operand[0].mode;
8449
 8451 /* If we got invalid arguments, bail out before generating bad rtl. */
8451 if (arg0 == error_mark_node)
9a171fcd 8452 return const0_rtx;
95385cbb 8453
b44140e7
AH
8454 if (TREE_CODE (arg0) != INTEGER_CST
8455 || TREE_INT_CST_LOW (arg0) & ~0x3)
8456 {
8457 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8458 return const0_rtx;
b44140e7
AH
8459 }
8460
95385cbb
AH
8461 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8462 op0 = copy_to_mode_reg (mode0, op0);
8463
8464 emit_insn (gen_altivec_dss (op0));
0ac081f6 8465 return NULL_RTX;
7a4eca66
DE
8466
8467 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8468 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8469 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8470 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8471 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8472
8473 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8474 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8475 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8476 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8477 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8478
8479 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8480 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8481 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8482 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8483 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8484
8485 default:
8486 break;
8487 /* Fall through. */
0ac081f6 8488 }
24408032 8489
100c4561 8490 /* Expand abs* operations. */
586de218 8491 d = bdesc_abs;
ca7558fc 8492 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8493 if (d->code == fcode)
5039610b 8494 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8495
ae4b4a02 8496 /* Expand the AltiVec predicates. */
586de218 8497 dp = bdesc_altivec_preds;
ca7558fc 8498 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8499 if (dp->code == fcode)
c4ad648e 8500 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8501 exp, target);
ae4b4a02 8502
6525c0e7
AH
8503 /* LV* are funky. We initialized them differently. */
8504 switch (fcode)
8505 {
8506 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8507 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5039610b 8508 exp, target);
6525c0e7 8509 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8510 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5039610b 8511 exp, target);
6525c0e7 8512 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8513 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5039610b 8514 exp, target);
6525c0e7 8515 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8516 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5039610b 8517 exp, target);
6525c0e7 8518 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8519 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5039610b 8520 exp, target);
6525c0e7 8521 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8522 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5039610b 8523 exp, target);
6525c0e7 8524 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8525 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5039610b 8526 exp, target);
6525c0e7
AH
8527 default:
8528 break;
8529 /* Fall through. */
8530 }
95385cbb 8531
92898235 8532 *expandedp = false;
0ac081f6
AH
8533 return NULL_RTX;
8534}
8535
96038623
DE
8536/* Expand the builtin in EXP and store the result in TARGET. Store
8537 true in *EXPANDEDP if we found a builtin to expand. */
8538static rtx
8539paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8540{
8541 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8542 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 8543 const struct builtin_description *d;
96038623
DE
8544 size_t i;
8545
8546 *expandedp = true;
8547
8548 switch (fcode)
8549 {
8550 case PAIRED_BUILTIN_STX:
8551 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8552 case PAIRED_BUILTIN_LX:
8553 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8554 default:
8555 break;
8556 /* Fall through. */
8557 }
8558
8559 /* Expand the paired predicates. */
23a651fc 8560 d = bdesc_paired_preds;
96038623
DE
8561 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8562 if (d->code == fcode)
8563 return paired_expand_predicate_builtin (d->icode, exp, target);
8564
8565 *expandedp = false;
8566 return NULL_RTX;
8567}
8568
a3170dc6
AH
8569/* Binops that need to be initialized manually, but can be expanded
8570 automagically by rs6000_expand_binop_builtin. */
8571static struct builtin_description bdesc_2arg_spe[] =
8572{
8573 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8574 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8575 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8576 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8577 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8578 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8579 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8580 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8581 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8582 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8583 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8584 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8585 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8586 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8587 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8588 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8589 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8590 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8591 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8592 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8593 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8594 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8595};
8596
8597/* Expand the builtin in EXP and store the result in TARGET. Store
8598 true in *EXPANDEDP if we found a builtin to expand.
8599
8600 This expands the SPE builtins that are not simple unary and binary
8601 operations. */
8602static rtx
a2369ed3 8603spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8604{
5039610b 8605 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8606 tree arg1, arg0;
8607 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8608 enum insn_code icode;
8609 enum machine_mode tmode, mode0;
8610 rtx pat, op0;
8611 struct builtin_description *d;
8612 size_t i;
8613
8614 *expandedp = true;
8615
8616 /* Syntax check for a 5-bit unsigned immediate. */
8617 switch (fcode)
8618 {
8619 case SPE_BUILTIN_EVSTDD:
8620 case SPE_BUILTIN_EVSTDH:
8621 case SPE_BUILTIN_EVSTDW:
8622 case SPE_BUILTIN_EVSTWHE:
8623 case SPE_BUILTIN_EVSTWHO:
8624 case SPE_BUILTIN_EVSTWWE:
8625 case SPE_BUILTIN_EVSTWWO:
5039610b 8626 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8627 if (TREE_CODE (arg1) != INTEGER_CST
8628 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8629 {
8630 error ("argument 2 must be a 5-bit unsigned literal");
8631 return const0_rtx;
8632 }
8633 break;
8634 default:
8635 break;
8636 }
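/* [Editor's note -- illustrative sketch only, not part of rs6000.c.
   Variable names below are hypothetical.]  The check above means the
   third argument of these SPE store builtins must be a compile-time
   constant that fits in 5 bits (0..31), e.g.:

     __ev64_opaque__ v;
     unsigned int *p;
     __builtin_spe_evstwwe (v, p, 7);     accepted: literal in 0..31
     __builtin_spe_evstwwe (v, p, 99);    rejected with the error above  */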
8637
00332c9f
AH
8638 /* The evsplat*i instructions are not quite generic. */
8639 switch (fcode)
8640 {
8641 case SPE_BUILTIN_EVSPLATFI:
8642 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8643 exp, target);
00332c9f
AH
8644 case SPE_BUILTIN_EVSPLATI:
8645 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8646 exp, target);
00332c9f
AH
8647 default:
8648 break;
8649 }
8650
a3170dc6
AH
8651 d = (struct builtin_description *) bdesc_2arg_spe;
8652 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8653 if (d->code == fcode)
5039610b 8654 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8655
8656 d = (struct builtin_description *) bdesc_spe_predicates;
8657 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8658 if (d->code == fcode)
5039610b 8659 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8660
8661 d = (struct builtin_description *) bdesc_spe_evsel;
8662 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8663 if (d->code == fcode)
5039610b 8664 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8665
8666 switch (fcode)
8667 {
8668 case SPE_BUILTIN_EVSTDDX:
5039610b 8669 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8670 case SPE_BUILTIN_EVSTDHX:
5039610b 8671 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8672 case SPE_BUILTIN_EVSTDWX:
5039610b 8673 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8674 case SPE_BUILTIN_EVSTWHEX:
5039610b 8675 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8676 case SPE_BUILTIN_EVSTWHOX:
5039610b 8677 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8678 case SPE_BUILTIN_EVSTWWEX:
5039610b 8679 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8680 case SPE_BUILTIN_EVSTWWOX:
5039610b 8681 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8682 case SPE_BUILTIN_EVSTDD:
5039610b 8683 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8684 case SPE_BUILTIN_EVSTDH:
5039610b 8685 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8686 case SPE_BUILTIN_EVSTDW:
5039610b 8687 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8688 case SPE_BUILTIN_EVSTWHE:
5039610b 8689 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8690 case SPE_BUILTIN_EVSTWHO:
5039610b 8691 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8692 case SPE_BUILTIN_EVSTWWE:
5039610b 8693 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8694 case SPE_BUILTIN_EVSTWWO:
5039610b 8695 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8696 case SPE_BUILTIN_MFSPEFSCR:
8697 icode = CODE_FOR_spe_mfspefscr;
8698 tmode = insn_data[icode].operand[0].mode;
8699
8700 if (target == 0
8701 || GET_MODE (target) != tmode
8702 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8703 target = gen_reg_rtx (tmode);
f676971a 8704
a3170dc6
AH
8705 pat = GEN_FCN (icode) (target);
8706 if (! pat)
8707 return 0;
8708 emit_insn (pat);
8709 return target;
8710 case SPE_BUILTIN_MTSPEFSCR:
8711 icode = CODE_FOR_spe_mtspefscr;
5039610b 8712 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8713 op0 = expand_normal (arg0);
a3170dc6
AH
8714 mode0 = insn_data[icode].operand[0].mode;
8715
8716 if (arg0 == error_mark_node)
8717 return const0_rtx;
8718
8719 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8720 op0 = copy_to_mode_reg (mode0, op0);
8721
8722 pat = GEN_FCN (icode) (op0);
8723 if (pat)
8724 emit_insn (pat);
8725 return NULL_RTX;
8726 default:
8727 break;
8728 }
8729
8730 *expandedp = false;
8731 return NULL_RTX;
8732}
8733
96038623
DE
8734static rtx
8735paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
8736{
8737 rtx pat, scratch, tmp;
8738 tree form = CALL_EXPR_ARG (exp, 0);
8739 tree arg0 = CALL_EXPR_ARG (exp, 1);
8740 tree arg1 = CALL_EXPR_ARG (exp, 2);
8741 rtx op0 = expand_normal (arg0);
8742 rtx op1 = expand_normal (arg1);
8743 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8744 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8745 int form_int;
8746 enum rtx_code code;
8747
8748 if (TREE_CODE (form) != INTEGER_CST)
8749 {
8750 error ("argument 1 of __builtin_paired_predicate must be a constant");
8751 return const0_rtx;
8752 }
8753 else
8754 form_int = TREE_INT_CST_LOW (form);
8755
8756 gcc_assert (mode0 == mode1);
8757
8758 if (arg0 == error_mark_node || arg1 == error_mark_node)
8759 return const0_rtx;
8760
8761 if (target == 0
8762 || GET_MODE (target) != SImode
8763 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
8764 target = gen_reg_rtx (SImode);
8765 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
8766 op0 = copy_to_mode_reg (mode0, op0);
8767 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
8768 op1 = copy_to_mode_reg (mode1, op1);
8769
8770 scratch = gen_reg_rtx (CCFPmode);
8771
8772 pat = GEN_FCN (icode) (scratch, op0, op1);
8773 if (!pat)
8774 return const0_rtx;
8775
8776 emit_insn (pat);
8777
8778 switch (form_int)
8779 {
8780 /* LT bit. */
8781 case 0:
8782 code = LT;
8783 break;
8784 /* GT bit. */
8785 case 1:
8786 code = GT;
8787 break;
8788 /* EQ bit. */
8789 case 2:
8790 code = EQ;
8791 break;
8792 /* UN bit. */
8793 case 3:
8794 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8795 return target;
8796 default:
8797 error ("argument 1 of __builtin_paired_predicate is out of range");
8798 return const0_rtx;
8799 }
8800
8801 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8802 emit_move_insn (target, tmp);
8803 return target;
8804}
8805
a3170dc6 8806static rtx
5039610b 8807spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8808{
8809 rtx pat, scratch, tmp;
5039610b
SL
8810 tree form = CALL_EXPR_ARG (exp, 0);
8811 tree arg0 = CALL_EXPR_ARG (exp, 1);
8812 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8813 rtx op0 = expand_normal (arg0);
8814 rtx op1 = expand_normal (arg1);
a3170dc6
AH
8815 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8816 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8817 int form_int;
8818 enum rtx_code code;
8819
8820 if (TREE_CODE (form) != INTEGER_CST)
8821 {
8822 error ("argument 1 of __builtin_spe_predicate must be a constant");
8823 return const0_rtx;
8824 }
8825 else
8826 form_int = TREE_INT_CST_LOW (form);
8827
37409796 8828 gcc_assert (mode0 == mode1);
a3170dc6
AH
8829
8830 if (arg0 == error_mark_node || arg1 == error_mark_node)
8831 return const0_rtx;
8832
8833 if (target == 0
8834 || GET_MODE (target) != SImode
8835 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
8836 target = gen_reg_rtx (SImode);
8837
8838 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8839 op0 = copy_to_mode_reg (mode0, op0);
8840 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8841 op1 = copy_to_mode_reg (mode1, op1);
8842
8843 scratch = gen_reg_rtx (CCmode);
8844
8845 pat = GEN_FCN (icode) (scratch, op0, op1);
8846 if (! pat)
8847 return const0_rtx;
8848 emit_insn (pat);
8849
8850 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
8851 _lower_. We use one compare, but look in different bits of the
8852 CR for each variant.
8853
8854 There are 2 elements in each SPE simd type (upper/lower). The CR
8855 bits are set as follows:
8856
8857 BIT0 | BIT 1 | BIT 2 | BIT 3
8858 U | L | (U | L) | (U & L)
8859
8860 So, for an "all" relationship, BIT 3 would be set.
8861 For an "any" relationship, BIT 2 would be set. Etc.
8862
8863 Following traditional nomenclature, these bits map to:
8864
8865 BIT0 | BIT 1 | BIT 2 | BIT 3
8866 LT | GT | EQ | OV
8867
 8868 Later, we will generate rtl to look in the OV/EQ/LT/GT bits
 (for forms 0-3 respectively).
8869 */
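/* [Editor's note -- worked example, derived only from the comment above
   and the switch below.]  Suppose the per-element comparison is true for
   the upper element (U = 1) and false for the lower element (L = 0):

     BIT0 = U     = 1   upper variant (form 2, LT bit) -> bit set
     BIT1 = L     = 0   lower variant (form 3, GT bit) -> bit clear
     BIT2 = U | L = 1   any   variant (form 1, EQ bit) -> bit set
     BIT3 = U & L = 0   all   variant (form 0, OV bit) -> bit clear  */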
8870
8871 switch (form_int)
8872 {
8873 /* All variant. OV bit. */
8874 case 0:
8875 /* We need to get to the OV bit, which is the ORDERED bit. We
8876 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 8877 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
8878 So let's just use another pattern. */
8879 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8880 return target;
8881 /* Any variant. EQ bit. */
8882 case 1:
8883 code = EQ;
8884 break;
8885 /* Upper variant. LT bit. */
8886 case 2:
8887 code = LT;
8888 break;
8889 /* Lower variant. GT bit. */
8890 case 3:
8891 code = GT;
8892 break;
8893 default:
8894 error ("argument 1 of __builtin_spe_predicate is out of range");
8895 return const0_rtx;
8896 }
8897
8898 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8899 emit_move_insn (target, tmp);
8900
8901 return target;
8902}
8903
8904/* The evsel builtins look like this:
8905
8906 e = __builtin_spe_evsel_OP (a, b, c, d);
8907
8908 and work like this:
8909
8910 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
8911 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
8912*/
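/* [Editor's note -- illustrative example based only on the comment above.]
   With a greater-than comparison OP and the two-element values
   a = {1, 5}, b = {3, 2}, c = {10, 20}, d = {30, 40}:

     e[upper] = (1 > 3) ? 10 : 30  =>  30
     e[lower] = (5 > 2) ? 20 : 40  =>  20

   i.e. the comparison is evaluated per element, and each result selects
   between the corresponding elements of c and d.  */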
8913
8914static rtx
5039610b 8915spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8916{
8917 rtx pat, scratch;
5039610b
SL
8918 tree arg0 = CALL_EXPR_ARG (exp, 0);
8919 tree arg1 = CALL_EXPR_ARG (exp, 1);
8920 tree arg2 = CALL_EXPR_ARG (exp, 2);
8921 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
8922 rtx op0 = expand_normal (arg0);
8923 rtx op1 = expand_normal (arg1);
8924 rtx op2 = expand_normal (arg2);
8925 rtx op3 = expand_normal (arg3);
a3170dc6
AH
8926 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8927 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8928
37409796 8929 gcc_assert (mode0 == mode1);
a3170dc6
AH
8930
8931 if (arg0 == error_mark_node || arg1 == error_mark_node
8932 || arg2 == error_mark_node || arg3 == error_mark_node)
8933 return const0_rtx;
8934
8935 if (target == 0
8936 || GET_MODE (target) != mode0
8937 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
8938 target = gen_reg_rtx (mode0);
8939
8940 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8941 op0 = copy_to_mode_reg (mode0, op0);
8942 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8943 op1 = copy_to_mode_reg (mode0, op1);
8944 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8945 op2 = copy_to_mode_reg (mode0, op2);
8946 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
8947 op3 = copy_to_mode_reg (mode0, op3);
8948
8949 /* Generate the compare. */
8950 scratch = gen_reg_rtx (CCmode);
8951 pat = GEN_FCN (icode) (scratch, op0, op1);
8952 if (! pat)
8953 return const0_rtx;
8954 emit_insn (pat);
8955
8956 if (mode0 == V2SImode)
8957 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
8958 else
8959 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
8960
8961 return target;
8962}
8963
0ac081f6
AH
8964/* Expand an expression EXP that calls a built-in function,
8965 with result going to TARGET if that's convenient
8966 (and in mode MODE if that's convenient).
8967 SUBTARGET may be used as the target for computing one of EXP's operands.
8968 IGNORE is nonzero if the value is to be ignored. */
8969
8970static rtx
a2369ed3 8971rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
8972 enum machine_mode mode ATTRIBUTE_UNUSED,
8973 int ignore ATTRIBUTE_UNUSED)
0ac081f6 8974{
5039610b 8975 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 8976 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 8977 const struct builtin_description *d;
92898235
AH
8978 size_t i;
8979 rtx ret;
8980 bool success;
f676971a 8981
9c78b944
DE
8982 if (fcode == RS6000_BUILTIN_RECIP)
8983 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
8984
8985 if (fcode == RS6000_BUILTIN_RECIPF)
8986 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
8987
8988 if (fcode == RS6000_BUILTIN_RSQRTF)
8989 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
8990
7ccf35ed
DN
8991 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
8992 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8993 {
8994 int icode = (int) CODE_FOR_altivec_lvsr;
8995 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8996 enum machine_mode mode = insn_data[icode].operand[1].mode;
8997 tree arg;
8998 rtx op, addr, pat;
8999
37409796 9000 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 9001
5039610b 9002 arg = CALL_EXPR_ARG (exp, 0);
37409796 9003 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
9004 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
9005 addr = memory_address (mode, op);
9006 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9007 op = addr;
9008 else
9009 {
 9010 /* For the load case we need to negate the address. */
9011 op = gen_reg_rtx (GET_MODE (addr));
9012 emit_insn (gen_rtx_SET (VOIDmode, op,
9013 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 9014 }
7ccf35ed
DN
9015 op = gen_rtx_MEM (mode, op);
9016
9017 if (target == 0
9018 || GET_MODE (target) != tmode
9019 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9020 target = gen_reg_rtx (tmode);
9021
9022 /*pat = gen_altivec_lvsr (target, op);*/
9023 pat = GEN_FCN (icode) (target, op);
9024 if (!pat)
9025 return 0;
9026 emit_insn (pat);
9027
9028 return target;
9029 }
5039610b
SL
9030
9031 /* FIXME: There's got to be a nicer way to handle this case than
9032 constructing a new CALL_EXPR. */
f57d17f1
TM
9033 if (fcode == ALTIVEC_BUILTIN_VCFUX
9034 || fcode == ALTIVEC_BUILTIN_VCFSX)
9035 {
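      /* [Editor's note] If the user supplied only one argument (e.g. a
	 vcfsx/vcfux-style call without an explicit second operand), the
	 call is rebuilt below with a second argument of 0, so the
	 one-argument form behaves like the two-argument form with 0.  */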
5039610b
SL
9036 if (call_expr_nargs (exp) == 1)
9037 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
9038 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 9039 }
7ccf35ed 9040
0ac081f6 9041 if (TARGET_ALTIVEC)
92898235
AH
9042 {
9043 ret = altivec_expand_builtin (exp, target, &success);
9044
a3170dc6
AH
9045 if (success)
9046 return ret;
9047 }
9048 if (TARGET_SPE)
9049 {
9050 ret = spe_expand_builtin (exp, target, &success);
9051
92898235
AH
9052 if (success)
9053 return ret;
9054 }
96038623
DE
9055 if (TARGET_PAIRED_FLOAT)
9056 {
9057 ret = paired_expand_builtin (exp, target, &success);
9058
9059 if (success)
9060 return ret;
9061 }
92898235 9062
96038623 9063 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 9064
37409796
NS
9065 /* Handle simple unary operations. */
9066 d = (struct builtin_description *) bdesc_1arg;
9067 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9068 if (d->code == fcode)
5039610b 9069 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 9070
37409796
NS
9071 /* Handle simple binary operations. */
9072 d = (struct builtin_description *) bdesc_2arg;
9073 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9074 if (d->code == fcode)
5039610b 9075 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 9076
37409796 9077 /* Handle simple ternary operations. */
586de218 9078 d = bdesc_3arg;
37409796
NS
9079 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
9080 if (d->code == fcode)
5039610b 9081 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 9082
37409796 9083 gcc_unreachable ();
0ac081f6
AH
9084}
9085
7c62e993
PB
9086static tree
9087build_opaque_vector_type (tree node, int nunits)
9088{
9089 node = copy_node (node);
9090 TYPE_MAIN_VARIANT (node) = node;
9091 return build_vector_type (node, nunits);
9092}
9093
0ac081f6 9094static void
863d938c 9095rs6000_init_builtins (void)
0ac081f6 9096{
4a5eab38
PB
9097 V2SI_type_node = build_vector_type (intSI_type_node, 2);
9098 V2SF_type_node = build_vector_type (float_type_node, 2);
9099 V4HI_type_node = build_vector_type (intHI_type_node, 4);
9100 V4SI_type_node = build_vector_type (intSI_type_node, 4);
9101 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 9102 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
9103 V16QI_type_node = build_vector_type (intQI_type_node, 16);
9104
9105 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
9106 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
9107 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
9108
7c62e993
PB
9109 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
9110 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 9111 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 9112 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 9113
8bb418a3
ZL
9114 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
9115 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
9116 'vector unsigned short'. */
9117
8dd16ecc
NS
9118 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
9119 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
9120 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
9121 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 9122
58646b77
PB
9123 long_integer_type_internal_node = long_integer_type_node;
9124 long_unsigned_type_internal_node = long_unsigned_type_node;
9125 intQI_type_internal_node = intQI_type_node;
9126 uintQI_type_internal_node = unsigned_intQI_type_node;
9127 intHI_type_internal_node = intHI_type_node;
9128 uintHI_type_internal_node = unsigned_intHI_type_node;
9129 intSI_type_internal_node = intSI_type_node;
9130 uintSI_type_internal_node = unsigned_intSI_type_node;
9131 float_type_internal_node = float_type_node;
9132 void_type_internal_node = void_type_node;
9133
8bb418a3
ZL
9134 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9135 get_identifier ("__bool char"),
9136 bool_char_type_node));
9137 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9138 get_identifier ("__bool short"),
9139 bool_short_type_node));
9140 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9141 get_identifier ("__bool int"),
9142 bool_int_type_node));
9143 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9144 get_identifier ("__pixel"),
9145 pixel_type_node));
9146
4a5eab38
PB
9147 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
9148 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
9149 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
9150 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
9151
9152 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9153 get_identifier ("__vector unsigned char"),
9154 unsigned_V16QI_type_node));
9155 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9156 get_identifier ("__vector signed char"),
9157 V16QI_type_node));
9158 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9159 get_identifier ("__vector __bool char"),
9160 bool_V16QI_type_node));
9161
9162 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9163 get_identifier ("__vector unsigned short"),
9164 unsigned_V8HI_type_node));
9165 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9166 get_identifier ("__vector signed short"),
9167 V8HI_type_node));
9168 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9169 get_identifier ("__vector __bool short"),
9170 bool_V8HI_type_node));
9171
9172 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9173 get_identifier ("__vector unsigned int"),
9174 unsigned_V4SI_type_node));
9175 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9176 get_identifier ("__vector signed int"),
9177 V4SI_type_node));
9178 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9179 get_identifier ("__vector __bool int"),
9180 bool_V4SI_type_node));
9181
9182 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9183 get_identifier ("__vector float"),
9184 V4SF_type_node));
9185 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9186 get_identifier ("__vector __pixel"),
9187 pixel_V8HI_type_node));
9188
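/* [Editor's note -- hypothetical user-level sketch; parsing of these
   identifiers is handled outside this file.]  With the identifiers
   registered above, AltiVec-style declarations resolve to the distinct
   types built here, e.g.:

     __vector unsigned int  vui;    distinct from ...
     __vector __bool int    vbi;    ... this bool vector type
     __vector __pixel       vp;     the V8HI-shaped pixel vector  */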
96038623
DE
9189 if (TARGET_PAIRED_FLOAT)
9190 paired_init_builtins ();
a3170dc6 9191 if (TARGET_SPE)
3fdaa45a 9192 spe_init_builtins ();
0ac081f6
AH
9193 if (TARGET_ALTIVEC)
9194 altivec_init_builtins ();
96038623 9195 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 9196 rs6000_common_init_builtins ();
9c78b944
DE
9197 if (TARGET_PPC_GFXOPT)
9198 {
9199 tree ftype = build_function_type_list (float_type_node,
9200 float_type_node,
9201 float_type_node,
9202 NULL_TREE);
9203 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9204 RS6000_BUILTIN_RECIPF);
9205
9206 ftype = build_function_type_list (float_type_node,
9207 float_type_node,
9208 NULL_TREE);
9209 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9210 RS6000_BUILTIN_RSQRTF);
9211 }
9212 if (TARGET_POPCNTB)
9213 {
9214 tree ftype = build_function_type_list (double_type_node,
9215 double_type_node,
9216 double_type_node,
9217 NULL_TREE);
9218 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9219 RS6000_BUILTIN_RECIP);
9220
9221 }
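/* [Editor's note -- minimal usage sketch; variable names are hypothetical
   and the "reciprocal estimate" reading of these helpers is an assumption,
   not stated in this file.]  Given the signatures registered above:

     float  q  = __builtin_recipdivf (x, y);    (float, float)  -> float
     float  rs = __builtin_rsqrtf (x);          (float)         -> float
     double qd = __builtin_recipdiv (a, b);     (double, double)-> double  */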
69ca3549
DE
9222
9223#if TARGET_XCOFF
9224 /* AIX libm provides clog as __clog. */
9225 if (built_in_decls [BUILT_IN_CLOG])
9226 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9227#endif
fb220235
FXC
9228
9229#ifdef SUBTARGET_INIT_BUILTINS
9230 SUBTARGET_INIT_BUILTINS;
9231#endif
0ac081f6
AH
9232}
9233
a3170dc6
AH
9234/* Search through a set of builtins and enable the mask bits.
9235 DESC is an array of builtins.
b6d08ca1 9236 SIZE is the total number of builtins.
a3170dc6
AH
9237 START is the builtin enum at which to start.
9238 END is the builtin enum at which to end. */
0ac081f6 9239static void
a2369ed3 9240enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 9241 enum rs6000_builtins start,
a2369ed3 9242 enum rs6000_builtins end)
a3170dc6
AH
9243{
9244 int i;
9245
9246 for (i = 0; i < size; ++i)
9247 if (desc[i].code == start)
9248 break;
9249
9250 if (i == size)
9251 return;
9252
9253 for (; i < size; ++i)
9254 {
9255 /* Flip all the bits on. */
9256 desc[i].mask = target_flags;
9257 if (desc[i].code == end)
9258 break;
9259 }
9260}
9261
9262static void
863d938c 9263spe_init_builtins (void)
0ac081f6 9264{
a3170dc6
AH
9265 tree endlink = void_list_node;
9266 tree puint_type_node = build_pointer_type (unsigned_type_node);
9267 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9268 struct builtin_description *d;
0ac081f6
AH
9269 size_t i;
9270
a3170dc6
AH
9271 tree v2si_ftype_4_v2si
9272 = build_function_type
3fdaa45a
AH
9273 (opaque_V2SI_type_node,
9274 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9275 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9276 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9277 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9278 endlink)))));
9279
9280 tree v2sf_ftype_4_v2sf
9281 = build_function_type
3fdaa45a
AH
9282 (opaque_V2SF_type_node,
9283 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9284 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9285 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9286 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9287 endlink)))));
9288
9289 tree int_ftype_int_v2si_v2si
9290 = build_function_type
9291 (integer_type_node,
9292 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9293 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9294 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9295 endlink))));
9296
9297 tree int_ftype_int_v2sf_v2sf
9298 = build_function_type
9299 (integer_type_node,
9300 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9301 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9302 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9303 endlink))));
9304
9305 tree void_ftype_v2si_puint_int
9306 = build_function_type (void_type_node,
3fdaa45a 9307 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9308 tree_cons (NULL_TREE, puint_type_node,
9309 tree_cons (NULL_TREE,
9310 integer_type_node,
9311 endlink))));
9312
9313 tree void_ftype_v2si_puint_char
9314 = build_function_type (void_type_node,
3fdaa45a 9315 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9316 tree_cons (NULL_TREE, puint_type_node,
9317 tree_cons (NULL_TREE,
9318 char_type_node,
9319 endlink))));
9320
9321 tree void_ftype_v2si_pv2si_int
9322 = build_function_type (void_type_node,
3fdaa45a 9323 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9324 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9325 tree_cons (NULL_TREE,
9326 integer_type_node,
9327 endlink))));
9328
9329 tree void_ftype_v2si_pv2si_char
9330 = build_function_type (void_type_node,
3fdaa45a 9331 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9332 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9333 tree_cons (NULL_TREE,
9334 char_type_node,
9335 endlink))));
9336
9337 tree void_ftype_int
9338 = build_function_type (void_type_node,
9339 tree_cons (NULL_TREE, integer_type_node, endlink));
9340
9341 tree int_ftype_void
36e8d515 9342 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
9343
9344 tree v2si_ftype_pv2si_int
3fdaa45a 9345 = build_function_type (opaque_V2SI_type_node,
6035d635 9346 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9347 tree_cons (NULL_TREE, integer_type_node,
9348 endlink)));
9349
9350 tree v2si_ftype_puint_int
3fdaa45a 9351 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9352 tree_cons (NULL_TREE, puint_type_node,
9353 tree_cons (NULL_TREE, integer_type_node,
9354 endlink)));
9355
9356 tree v2si_ftype_pushort_int
3fdaa45a 9357 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9358 tree_cons (NULL_TREE, pushort_type_node,
9359 tree_cons (NULL_TREE, integer_type_node,
9360 endlink)));
9361
00332c9f
AH
9362 tree v2si_ftype_signed_char
9363 = build_function_type (opaque_V2SI_type_node,
9364 tree_cons (NULL_TREE, signed_char_type_node,
9365 endlink));
9366
a3170dc6
AH
9367 /* The initialization of the simple binary and unary builtins is
9368 done in rs6000_common_init_builtins, but we have to enable the
9369 mask bits here manually because we have run out of `target_flags'
9370 bits. We really need to redesign this mask business. */
9371
9372 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9373 ARRAY_SIZE (bdesc_2arg),
9374 SPE_BUILTIN_EVADDW,
9375 SPE_BUILTIN_EVXOR);
9376 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9377 ARRAY_SIZE (bdesc_1arg),
9378 SPE_BUILTIN_EVABS,
9379 SPE_BUILTIN_EVSUBFUSIAAW);
9380 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9381 ARRAY_SIZE (bdesc_spe_predicates),
9382 SPE_BUILTIN_EVCMPEQ,
9383 SPE_BUILTIN_EVFSTSTLT);
9384 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9385 ARRAY_SIZE (bdesc_spe_evsel),
9386 SPE_BUILTIN_EVSEL_CMPGTS,
9387 SPE_BUILTIN_EVSEL_FSTSTEQ);
9388
36252949
AH
9389 (*lang_hooks.decls.pushdecl)
9390 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9391 opaque_V2SI_type_node));
9392
a3170dc6 9393 /* Initialize irregular SPE builtins. */
f676971a 9394
a3170dc6
AH
9395 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9396 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9397 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9398 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9399 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9400 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9401 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9402 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9403 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9404 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9405 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9406 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9407 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9408 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9409 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9410 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
9411 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9412 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
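/* [Editor's note -- illustrative only, hypothetical user code.]  Both
   splat builtins take a small signed literal and return the opaque SPE
   vector type declared just above, e.g.:

     __ev64_opaque__ v = __builtin_spe_evsplati (5);  */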
a3170dc6
AH
9413
9414 /* Loads. */
9415 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9416 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9417 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9418 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9419 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9420 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9421 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9422 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9423 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9424 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9425 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9426 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9427 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9428 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9429 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9430 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9431 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9432 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9433 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9434 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9435 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9436 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9437
9438 /* Predicates. */
9439 d = (struct builtin_description *) bdesc_spe_predicates;
9440 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9441 {
9442 tree type;
9443
9444 switch (insn_data[d->icode].operand[1].mode)
9445 {
9446 case V2SImode:
9447 type = int_ftype_int_v2si_v2si;
9448 break;
9449 case V2SFmode:
9450 type = int_ftype_int_v2sf_v2sf;
9451 break;
9452 default:
37409796 9453 gcc_unreachable ();
a3170dc6
AH
9454 }
9455
9456 def_builtin (d->mask, d->name, type, d->code);
9457 }
9458
9459 /* Evsel predicates. */
9460 d = (struct builtin_description *) bdesc_spe_evsel;
9461 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9462 {
9463 tree type;
9464
9465 switch (insn_data[d->icode].operand[1].mode)
9466 {
9467 case V2SImode:
9468 type = v2si_ftype_4_v2si;
9469 break;
9470 case V2SFmode:
9471 type = v2sf_ftype_4_v2sf;
9472 break;
9473 default:
37409796 9474 gcc_unreachable ();
a3170dc6
AH
9475 }
9476
9477 def_builtin (d->mask, d->name, type, d->code);
9478 }
9479}
9480
96038623
DE
9481static void
9482paired_init_builtins (void)
9483{
23a651fc 9484 const struct builtin_description *d;
96038623
DE
9485 size_t i;
9486 tree endlink = void_list_node;
9487
9488 tree int_ftype_int_v2sf_v2sf
9489 = build_function_type
9490 (integer_type_node,
9491 tree_cons (NULL_TREE, integer_type_node,
9492 tree_cons (NULL_TREE, V2SF_type_node,
9493 tree_cons (NULL_TREE, V2SF_type_node,
9494 endlink))));
9495 tree pcfloat_type_node =
9496 build_pointer_type (build_qualified_type
9497 (float_type_node, TYPE_QUAL_CONST));
9498
9499 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9500 long_integer_type_node,
9501 pcfloat_type_node,
9502 NULL_TREE);
9503 tree void_ftype_v2sf_long_pcfloat =
9504 build_function_type_list (void_type_node,
9505 V2SF_type_node,
9506 long_integer_type_node,
9507 pcfloat_type_node,
9508 NULL_TREE);
9509
9510
9511 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9512 PAIRED_BUILTIN_LX);
9513
9514
9515 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9516 PAIRED_BUILTIN_STX);
9517
9518 /* Predicates. */
23a651fc 9519 d = bdesc_paired_preds;
96038623
DE
9520 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9521 {
9522 tree type;
9523
9524 switch (insn_data[d->icode].operand[1].mode)
9525 {
9526 case V2SFmode:
9527 type = int_ftype_int_v2sf_v2sf;
9528 break;
9529 default:
9530 gcc_unreachable ();
9531 }
9532
9533 def_builtin (d->mask, d->name, type, d->code);
9534 }
9535}
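/* [Editor's note -- minimal sketch under assumptions; the exact offset
   semantics are implemented elsewhere (paired_expand_lv_builtin /
   paired_expand_stv_builtin).]  Per the signatures registered above, the
   paired-single load/store builtins take a long offset plus a float
   pointer, in the same style as the AltiVec lvx/stvx builtins:

     v = __builtin_paired_lx (0, src);       v has the V2SF vector type
     __builtin_paired_stx (v, 8, dst);       (vector, offset, pointer)  */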
9536
a3170dc6 9537static void
863d938c 9538altivec_init_builtins (void)
a3170dc6 9539{
586de218
KG
9540 const struct builtin_description *d;
9541 const struct builtin_description_predicates *dp;
a3170dc6 9542 size_t i;
7a4eca66
DE
9543 tree ftype;
9544
a3170dc6
AH
9545 tree pfloat_type_node = build_pointer_type (float_type_node);
9546 tree pint_type_node = build_pointer_type (integer_type_node);
9547 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9548 tree pchar_type_node = build_pointer_type (char_type_node);
9549
9550 tree pvoid_type_node = build_pointer_type (void_type_node);
9551
0dbc3651
ZW
9552 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9553 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9554 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9555 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9556
9557 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9558
58646b77
PB
9559 tree int_ftype_opaque
9560 = build_function_type_list (integer_type_node,
9561 opaque_V4SI_type_node, NULL_TREE);
9562
9563 tree opaque_ftype_opaque_int
9564 = build_function_type_list (opaque_V4SI_type_node,
9565 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9566 tree opaque_ftype_opaque_opaque_int
9567 = build_function_type_list (opaque_V4SI_type_node,
9568 opaque_V4SI_type_node, opaque_V4SI_type_node,
9569 integer_type_node, NULL_TREE);
9570 tree int_ftype_int_opaque_opaque
9571 = build_function_type_list (integer_type_node,
9572 integer_type_node, opaque_V4SI_type_node,
9573 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
9574 tree int_ftype_int_v4si_v4si
9575 = build_function_type_list (integer_type_node,
9576 integer_type_node, V4SI_type_node,
9577 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9578 tree v4sf_ftype_pcfloat
9579 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9580 tree void_ftype_pfloat_v4sf
b4de2f7d 9581 = build_function_type_list (void_type_node,
a3170dc6 9582 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
9583 tree v4si_ftype_pcint
9584 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9585 tree void_ftype_pint_v4si
b4de2f7d
AH
9586 = build_function_type_list (void_type_node,
9587 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9588 tree v8hi_ftype_pcshort
9589 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9590 tree void_ftype_pshort_v8hi
b4de2f7d
AH
9591 = build_function_type_list (void_type_node,
9592 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
9593 tree v16qi_ftype_pcchar
9594 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9595 tree void_ftype_pchar_v16qi
b4de2f7d
AH
9596 = build_function_type_list (void_type_node,
9597 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9598 tree void_ftype_v4si
b4de2f7d 9599 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9600 tree v8hi_ftype_void
9601 = build_function_type (V8HI_type_node, void_list_node);
9602 tree void_ftype_void
9603 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
9604 tree void_ftype_int
9605 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9606
58646b77
PB
9607 tree opaque_ftype_long_pcvoid
9608 = build_function_type_list (opaque_V4SI_type_node,
9609 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9610 tree v16qi_ftype_long_pcvoid
a3170dc6 9611 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
9612 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9613 tree v8hi_ftype_long_pcvoid
a3170dc6 9614 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
9615 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9616 tree v4si_ftype_long_pcvoid
a3170dc6 9617 = build_function_type_list (V4SI_type_node,
b4a62fa0 9618 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9619
58646b77
PB
9620 tree void_ftype_opaque_long_pvoid
9621 = build_function_type_list (void_type_node,
9622 opaque_V4SI_type_node, long_integer_type_node,
9623 pvoid_type_node, NULL_TREE);
b4a62fa0 9624 tree void_ftype_v4si_long_pvoid
b4de2f7d 9625 = build_function_type_list (void_type_node,
b4a62fa0 9626 V4SI_type_node, long_integer_type_node,
b4de2f7d 9627 pvoid_type_node, NULL_TREE);
b4a62fa0 9628 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9629 = build_function_type_list (void_type_node,
b4a62fa0 9630 V16QI_type_node, long_integer_type_node,
b4de2f7d 9631 pvoid_type_node, NULL_TREE);
b4a62fa0 9632 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9633 = build_function_type_list (void_type_node,
b4a62fa0 9634 V8HI_type_node, long_integer_type_node,
b4de2f7d 9635 pvoid_type_node, NULL_TREE);
a3170dc6
AH
9636 tree int_ftype_int_v8hi_v8hi
9637 = build_function_type_list (integer_type_node,
9638 integer_type_node, V8HI_type_node,
9639 V8HI_type_node, NULL_TREE);
9640 tree int_ftype_int_v16qi_v16qi
9641 = build_function_type_list (integer_type_node,
9642 integer_type_node, V16QI_type_node,
9643 V16QI_type_node, NULL_TREE);
9644 tree int_ftype_int_v4sf_v4sf
9645 = build_function_type_list (integer_type_node,
9646 integer_type_node, V4SF_type_node,
9647 V4SF_type_node, NULL_TREE);
9648 tree v4si_ftype_v4si
9649 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9650 tree v8hi_ftype_v8hi
9651 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9652 tree v16qi_ftype_v16qi
9653 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9654 tree v4sf_ftype_v4sf
9655 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9656 tree void_ftype_pcvoid_int_int
a3170dc6 9657 = build_function_type_list (void_type_node,
0dbc3651 9658 pcvoid_type_node, integer_type_node,
8bb418a3 9659 integer_type_node, NULL_TREE);
8bb418a3 9660
0dbc3651
ZW
9661 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9662 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9663 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9664 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9665 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9666 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9667 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9668 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9669 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9670 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9671 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9672 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9673 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9674 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9675 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9676 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
9677 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9678 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9679 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9680 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
9681 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9682 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9683 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9684 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9685 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9686 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9687 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9688 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9689 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9690 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9691 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9692 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
9693 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9694 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9695 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9696 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9697 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9698 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9699 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9700 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9701 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9702 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9703 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9704 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9705 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9706 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9707
9708 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
9709
9710 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
9711 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
9712 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
9713 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
9714 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
9715 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
9716 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
9717 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
9718 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
9719 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
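/* [Editor's note -- assumption, not stated in this file.]  The
   opaque-typed __builtin_vec_* entries above appear to be the overload
   entry points behind the generic vec_ld, vec_st, vec_sld, vec_splat,
   ... intrinsics of <altivec.h>; the per-type resolution of each use is
   handled outside rs6000.c.  */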
8bb418a3 9720
a3170dc6 9721 /* Add the DST variants. */
586de218 9722 d = bdesc_dst;
a3170dc6 9723 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 9724 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
9725
9726 /* Initialize the predicates. */
586de218 9727 dp = bdesc_altivec_preds;
a3170dc6
AH
9728 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
9729 {
9730 enum machine_mode mode1;
9731 tree type;
58646b77
PB
9732 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9733 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 9734
58646b77
PB
9735 if (is_overloaded)
9736 mode1 = VOIDmode;
9737 else
9738 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
9739
9740 switch (mode1)
9741 {
58646b77
PB
9742 case VOIDmode:
9743 type = int_ftype_int_opaque_opaque;
9744 break;
a3170dc6
AH
9745 case V4SImode:
9746 type = int_ftype_int_v4si_v4si;
9747 break;
9748 case V8HImode:
9749 type = int_ftype_int_v8hi_v8hi;
9750 break;
9751 case V16QImode:
9752 type = int_ftype_int_v16qi_v16qi;
9753 break;
9754 case V4SFmode:
9755 type = int_ftype_int_v4sf_v4sf;
9756 break;
9757 default:
37409796 9758 gcc_unreachable ();
a3170dc6 9759 }
f676971a 9760
a3170dc6
AH
9761 def_builtin (dp->mask, dp->name, type, dp->code);
9762 }
9763
9764 /* Initialize the abs* operators. */
586de218 9765 d = bdesc_abs;
a3170dc6
AH
9766 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
9767 {
9768 enum machine_mode mode0;
9769 tree type;
9770
9771 mode0 = insn_data[d->icode].operand[0].mode;
9772
9773 switch (mode0)
9774 {
9775 case V4SImode:
9776 type = v4si_ftype_v4si;
9777 break;
9778 case V8HImode:
9779 type = v8hi_ftype_v8hi;
9780 break;
9781 case V16QImode:
9782 type = v16qi_ftype_v16qi;
9783 break;
9784 case V4SFmode:
9785 type = v4sf_ftype_v4sf;
9786 break;
9787 default:
37409796 9788 gcc_unreachable ();
a3170dc6 9789 }
f676971a 9790
a3170dc6
AH
9791 def_builtin (d->mask, d->name, type, d->code);
9792 }
7ccf35ed 9793
13c62176
DN
9794 if (TARGET_ALTIVEC)
9795 {
9796 tree decl;
9797
9798 /* Initialize target builtin that implements
9799 targetm.vectorize.builtin_mask_for_load. */
9800
c79efc4d
RÁE
9801 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
9802 v16qi_ftype_long_pcvoid,
9803 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
9804 BUILT_IN_MD, NULL, NULL_TREE);
9805 TREE_READONLY (decl) = 1;
13c62176
DN
9806 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
9807 altivec_builtin_mask_for_load = decl;
13c62176 9808 }
7a4eca66
DE
9809
9810 /* Access to the vec_init patterns. */
9811 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
9812 integer_type_node, integer_type_node,
9813 integer_type_node, NULL_TREE);
9814 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
9815 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
9816
9817 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
9818 short_integer_type_node,
9819 short_integer_type_node,
9820 short_integer_type_node,
9821 short_integer_type_node,
9822 short_integer_type_node,
9823 short_integer_type_node,
9824 short_integer_type_node, NULL_TREE);
9825 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
9826 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
9827
9828 ftype = build_function_type_list (V16QI_type_node, char_type_node,
9829 char_type_node, char_type_node,
9830 char_type_node, char_type_node,
9831 char_type_node, char_type_node,
9832 char_type_node, char_type_node,
9833 char_type_node, char_type_node,
9834 char_type_node, char_type_node,
9835 char_type_node, char_type_node,
9836 char_type_node, NULL_TREE);
9837 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
9838 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
9839
9840 ftype = build_function_type_list (V4SF_type_node, float_type_node,
9841 float_type_node, float_type_node,
9842 float_type_node, NULL_TREE);
9843 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
9844 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
9845
9846 /* Access to the vec_set patterns. */
9847 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
9848 intSI_type_node,
9849 integer_type_node, NULL_TREE);
9850 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
9851 ALTIVEC_BUILTIN_VEC_SET_V4SI);
9852
9853 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
9854 intHI_type_node,
9855 integer_type_node, NULL_TREE);
9856 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
9857 ALTIVEC_BUILTIN_VEC_SET_V8HI);
9858
9859 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
9860 intQI_type_node,
9861 integer_type_node, NULL_TREE);
9862 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
9863 ALTIVEC_BUILTIN_VEC_SET_V16QI);
9864
9865 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
9866 float_type_node,
9867 integer_type_node, NULL_TREE);
9868 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
9869 ALTIVEC_BUILTIN_VEC_SET_V4SF);
9870
9871 /* Access to the vec_extract patterns. */
9872 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
9873 integer_type_node, NULL_TREE);
9874 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
9875 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
9876
9877 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
9878 integer_type_node, NULL_TREE);
9879 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
9880 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
9881
9882 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
9883 integer_type_node, NULL_TREE);
9884 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
9885 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
9886
9887 ftype = build_function_type_list (float_type_node, V4SF_type_node,
9888 integer_type_node, NULL_TREE);
9889 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
9890 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
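/* [Editor's note -- minimal sketch, hypothetical user code, following the
   signatures registered above.]

     __vector signed int v = __builtin_vec_init_v4si (1, 2, 3, 4);
     v = __builtin_vec_set_v4si (v, 7, 2);     insert value 7 at index 2
     int e = __builtin_vec_ext_v4si (v, 2);    extract element at index 2  */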
a3170dc6
AH
9891}
9892
9893static void
863d938c 9894rs6000_common_init_builtins (void)
a3170dc6 9895{
586de218 9896 const struct builtin_description *d;
a3170dc6
AH
9897 size_t i;
9898
96038623
DE
9899 tree v2sf_ftype_v2sf_v2sf_v2sf
9900 = build_function_type_list (V2SF_type_node,
9901 V2SF_type_node, V2SF_type_node,
9902 V2SF_type_node, NULL_TREE);
9903
a3170dc6
AH
9904 tree v4sf_ftype_v4sf_v4sf_v16qi
9905 = build_function_type_list (V4SF_type_node,
9906 V4SF_type_node, V4SF_type_node,
9907 V16QI_type_node, NULL_TREE);
9908 tree v4si_ftype_v4si_v4si_v16qi
9909 = build_function_type_list (V4SI_type_node,
9910 V4SI_type_node, V4SI_type_node,
9911 V16QI_type_node, NULL_TREE);
9912 tree v8hi_ftype_v8hi_v8hi_v16qi
9913 = build_function_type_list (V8HI_type_node,
9914 V8HI_type_node, V8HI_type_node,
9915 V16QI_type_node, NULL_TREE);
9916 tree v16qi_ftype_v16qi_v16qi_v16qi
9917 = build_function_type_list (V16QI_type_node,
9918 V16QI_type_node, V16QI_type_node,
9919 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
9920 tree v4si_ftype_int
9921 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
9922 tree v8hi_ftype_int
9923 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
9924 tree v16qi_ftype_int
9925 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
9926 tree v8hi_ftype_v16qi
9927 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
9928 tree v4sf_ftype_v4sf
9929 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
9930
9931 tree v2si_ftype_v2si_v2si
2abe3e28
AH
9932 = build_function_type_list (opaque_V2SI_type_node,
9933 opaque_V2SI_type_node,
9934 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9935
96038623 9936 tree v2sf_ftype_v2sf_v2sf_spe
2abe3e28
AH
9937 = build_function_type_list (opaque_V2SF_type_node,
9938 opaque_V2SF_type_node,
9939 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 9940
96038623
DE
9941 tree v2sf_ftype_v2sf_v2sf
9942 = build_function_type_list (V2SF_type_node,
9943 V2SF_type_node,
9944 V2SF_type_node, NULL_TREE);
9945
9946
a3170dc6 9947 tree v2si_ftype_int_int
2abe3e28 9948 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9949 integer_type_node, integer_type_node,
9950 NULL_TREE);
9951
58646b77
PB
9952 tree opaque_ftype_opaque
9953 = build_function_type_list (opaque_V4SI_type_node,
9954 opaque_V4SI_type_node, NULL_TREE);
9955
a3170dc6 9956 tree v2si_ftype_v2si
2abe3e28
AH
9957 = build_function_type_list (opaque_V2SI_type_node,
9958 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9959
96038623 9960 tree v2sf_ftype_v2sf_spe
2abe3e28
AH
9961 = build_function_type_list (opaque_V2SF_type_node,
9962 opaque_V2SF_type_node, NULL_TREE);
f676971a 9963
96038623
DE
9964 tree v2sf_ftype_v2sf
9965 = build_function_type_list (V2SF_type_node,
9966 V2SF_type_node, NULL_TREE);
9967
a3170dc6 9968 tree v2sf_ftype_v2si
2abe3e28
AH
9969 = build_function_type_list (opaque_V2SF_type_node,
9970 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
9971
9972 tree v2si_ftype_v2sf
2abe3e28
AH
9973 = build_function_type_list (opaque_V2SI_type_node,
9974 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
9975
9976 tree v2si_ftype_v2si_char
2abe3e28
AH
9977 = build_function_type_list (opaque_V2SI_type_node,
9978 opaque_V2SI_type_node,
9979 char_type_node, NULL_TREE);
a3170dc6
AH
9980
9981 tree v2si_ftype_int_char
2abe3e28 9982 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9983 integer_type_node, char_type_node, NULL_TREE);
9984
9985 tree v2si_ftype_char
2abe3e28
AH
9986 = build_function_type_list (opaque_V2SI_type_node,
9987 char_type_node, NULL_TREE);
a3170dc6
AH
9988
9989 tree int_ftype_int_int
9990 = build_function_type_list (integer_type_node,
9991 integer_type_node, integer_type_node,
9992 NULL_TREE);
95385cbb 9993
58646b77
PB
9994 tree opaque_ftype_opaque_opaque
9995 = build_function_type_list (opaque_V4SI_type_node,
9996 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 9997 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
9998 = build_function_type_list (V4SI_type_node,
9999 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 10000 tree v4sf_ftype_v4si_int
b4de2f7d 10001 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
10002 V4SI_type_node, integer_type_node, NULL_TREE);
10003 tree v4si_ftype_v4sf_int
b4de2f7d 10004 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10005 V4SF_type_node, integer_type_node, NULL_TREE);
10006 tree v4si_ftype_v4si_int
b4de2f7d 10007 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10008 V4SI_type_node, integer_type_node, NULL_TREE);
10009 tree v8hi_ftype_v8hi_int
b4de2f7d 10010 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
10011 V8HI_type_node, integer_type_node, NULL_TREE);
10012 tree v16qi_ftype_v16qi_int
b4de2f7d 10013 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
10014 V16QI_type_node, integer_type_node, NULL_TREE);
10015 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
10016 = build_function_type_list (V16QI_type_node,
10017 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
10018 integer_type_node, NULL_TREE);
10019 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
10020 = build_function_type_list (V8HI_type_node,
10021 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
10022 integer_type_node, NULL_TREE);
10023 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
10024 = build_function_type_list (V4SI_type_node,
10025 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
10026 integer_type_node, NULL_TREE);
10027 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
10028 = build_function_type_list (V4SF_type_node,
10029 V4SF_type_node, V4SF_type_node,
b9e4e5d1 10030 integer_type_node, NULL_TREE);
0ac081f6 10031 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
10032 = build_function_type_list (V4SF_type_node,
10033 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
10034 tree opaque_ftype_opaque_opaque_opaque
10035 = build_function_type_list (opaque_V4SI_type_node,
10036 opaque_V4SI_type_node, opaque_V4SI_type_node,
10037 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 10038 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
10039 = build_function_type_list (V4SF_type_node,
10040 V4SF_type_node, V4SF_type_node,
10041 V4SI_type_node, NULL_TREE);
2212663f 10042 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
10043 = build_function_type_list (V4SF_type_node,
10044 V4SF_type_node, V4SF_type_node,
10045 V4SF_type_node, NULL_TREE);
f676971a 10046 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
10047 = build_function_type_list (V4SI_type_node,
10048 V4SI_type_node, V4SI_type_node,
10049 V4SI_type_node, NULL_TREE);
0ac081f6 10050 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
10051 = build_function_type_list (V8HI_type_node,
10052 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 10053 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
10054 = build_function_type_list (V8HI_type_node,
10055 V8HI_type_node, V8HI_type_node,
10056 V8HI_type_node, NULL_TREE);
c4ad648e 10057 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
10058 = build_function_type_list (V4SI_type_node,
10059 V8HI_type_node, V8HI_type_node,
10060 V4SI_type_node, NULL_TREE);
c4ad648e 10061 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
10062 = build_function_type_list (V4SI_type_node,
10063 V16QI_type_node, V16QI_type_node,
10064 V4SI_type_node, NULL_TREE);
0ac081f6 10065 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
10066 = build_function_type_list (V16QI_type_node,
10067 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10068 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
10069 = build_function_type_list (V4SI_type_node,
10070 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 10071 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
10072 = build_function_type_list (V8HI_type_node,
10073 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10074 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
10075 = build_function_type_list (V4SI_type_node,
10076 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10077 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
10078 = build_function_type_list (V8HI_type_node,
10079 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 10080 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
10081 = build_function_type_list (V16QI_type_node,
10082 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10083 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
10084 = build_function_type_list (V4SI_type_node,
10085 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 10086 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
10087 = build_function_type_list (V4SI_type_node,
10088 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10089 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
10090 = build_function_type_list (V4SI_type_node,
10091 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
10092 tree v4si_ftype_v8hi
10093 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
10094 tree int_ftype_v4si_v4si
10095 = build_function_type_list (integer_type_node,
10096 V4SI_type_node, V4SI_type_node, NULL_TREE);
10097 tree int_ftype_v4sf_v4sf
10098 = build_function_type_list (integer_type_node,
10099 V4SF_type_node, V4SF_type_node, NULL_TREE);
10100 tree int_ftype_v16qi_v16qi
10101 = build_function_type_list (integer_type_node,
10102 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10103 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
10104 = build_function_type_list (integer_type_node,
10105 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10106
6f317ef3 10107 /* Add the simple ternary operators. */
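 /* The loop below derives each ternary builtin's function type from the
    machine modes of its insn pattern: e.g. a pattern whose four operands
    are all V4SFmode gets v4sf_ftype_v4sf_v4sf_v4sf, while an overloaded
    builtin (all VOIDmode operands) gets opaque_ftype_opaque_opaque_opaque.  */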
586de218 10108 d = bdesc_3arg;
ca7558fc 10109 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 10110 {
2212663f
DB
10111 enum machine_mode mode0, mode1, mode2, mode3;
10112 tree type;
58646b77
PB
10113 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10114 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 10115
58646b77
PB
10116 if (is_overloaded)
10117 {
10118 mode0 = VOIDmode;
10119 mode1 = VOIDmode;
10120 mode2 = VOIDmode;
10121 mode3 = VOIDmode;
10122 }
10123 else
10124 {
10125 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10126 continue;
f676971a 10127
58646b77
PB
10128 mode0 = insn_data[d->icode].operand[0].mode;
10129 mode1 = insn_data[d->icode].operand[1].mode;
10130 mode2 = insn_data[d->icode].operand[2].mode;
10131 mode3 = insn_data[d->icode].operand[3].mode;
10132 }
bb8df8a6 10133
2212663f
DB
10134 /* When all four are of the same mode. */
10135 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
10136 {
10137 switch (mode0)
10138 {
58646b77
PB
10139 case VOIDmode:
10140 type = opaque_ftype_opaque_opaque_opaque;
10141 break;
617e0e1d
DB
10142 case V4SImode:
10143 type = v4si_ftype_v4si_v4si_v4si;
10144 break;
2212663f
DB
10145 case V4SFmode:
10146 type = v4sf_ftype_v4sf_v4sf_v4sf;
10147 break;
10148 case V8HImode:
10149 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 10150 break;
2212663f
DB
10151 case V16QImode:
10152 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10153 break;
96038623
DE
10154 case V2SFmode:
10155 type = v2sf_ftype_v2sf_v2sf_v2sf;
10156 break;
2212663f 10157 default:
37409796 10158 gcc_unreachable ();
2212663f
DB
10159 }
10160 }
10161 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 10162 {
2212663f
DB
10163 switch (mode0)
10164 {
10165 case V4SImode:
10166 type = v4si_ftype_v4si_v4si_v16qi;
10167 break;
10168 case V4SFmode:
10169 type = v4sf_ftype_v4sf_v4sf_v16qi;
10170 break;
10171 case V8HImode:
10172 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 10173 break;
2212663f
DB
10174 case V16QImode:
10175 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10176 break;
2212663f 10177 default:
37409796 10178 gcc_unreachable ();
2212663f
DB
10179 }
10180 }
f676971a 10181 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 10182 && mode3 == V4SImode)
24408032 10183 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 10184 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 10185 && mode3 == V4SImode)
24408032 10186 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 10187 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 10188 && mode3 == V4SImode)
24408032
AH
10189 type = v4sf_ftype_v4sf_v4sf_v4si;
10190
a7b376ee 10191 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
10192 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10193 && mode3 == QImode)
b9e4e5d1 10194 type = v16qi_ftype_v16qi_v16qi_int;
24408032 10195
a7b376ee 10196 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
10197 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10198 && mode3 == QImode)
b9e4e5d1 10199 type = v8hi_ftype_v8hi_v8hi_int;
24408032 10200
a7b376ee 10201 /* vint, vint, vint, 4-bit literal. */
24408032
AH
10202 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10203 && mode3 == QImode)
b9e4e5d1 10204 type = v4si_ftype_v4si_v4si_int;
24408032 10205
a7b376ee 10206 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
10207 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10208 && mode3 == QImode)
b9e4e5d1 10209 type = v4sf_ftype_v4sf_v4sf_int;
24408032 10210
2212663f 10211 else
37409796 10212 gcc_unreachable ();
2212663f
DB
10213
10214 def_builtin (d->mask, d->name, type, d->code);
10215 }
10216
0ac081f6 10217 /* Add the simple binary operators. */
00b960c7 10218 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 10219 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
10220 {
10221 enum machine_mode mode0, mode1, mode2;
10222 tree type;
58646b77
PB
10223 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10224 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 10225
58646b77
PB
10226 if (is_overloaded)
10227 {
10228 mode0 = VOIDmode;
10229 mode1 = VOIDmode;
10230 mode2 = VOIDmode;
10231 }
10232 else
bb8df8a6 10233 {
58646b77
PB
10234 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10235 continue;
f676971a 10236
58646b77
PB
10237 mode0 = insn_data[d->icode].operand[0].mode;
10238 mode1 = insn_data[d->icode].operand[1].mode;
10239 mode2 = insn_data[d->icode].operand[2].mode;
10240 }
0ac081f6
AH
10241
10242 /* When all three operands are of the same mode. */
10243 if (mode0 == mode1 && mode1 == mode2)
10244 {
10245 switch (mode0)
10246 {
58646b77
PB
10247 case VOIDmode:
10248 type = opaque_ftype_opaque_opaque;
10249 break;
0ac081f6
AH
10250 case V4SFmode:
10251 type = v4sf_ftype_v4sf_v4sf;
10252 break;
10253 case V4SImode:
10254 type = v4si_ftype_v4si_v4si;
10255 break;
10256 case V16QImode:
10257 type = v16qi_ftype_v16qi_v16qi;
10258 break;
10259 case V8HImode:
10260 type = v8hi_ftype_v8hi_v8hi;
10261 break;
a3170dc6
AH
10262 case V2SImode:
10263 type = v2si_ftype_v2si_v2si;
10264 break;
96038623
DE
10265 case V2SFmode:
10266 if (TARGET_PAIRED_FLOAT)
10267 type = v2sf_ftype_v2sf_v2sf;
10268 else
10269 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
10270 break;
10271 case SImode:
10272 type = int_ftype_int_int;
10273 break;
0ac081f6 10274 default:
37409796 10275 gcc_unreachable ();
0ac081f6
AH
10276 }
10277 }
10278
10279 /* A few other combos we really don't want to do manually. */
10280
10281 /* vint, vfloat, vfloat. */
10282 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10283 type = v4si_ftype_v4sf_v4sf;
10284
10285 /* vshort, vchar, vchar. */
10286 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10287 type = v8hi_ftype_v16qi_v16qi;
10288
10289 /* vint, vshort, vshort. */
10290 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10291 type = v4si_ftype_v8hi_v8hi;
10292
10293 /* vshort, vint, vint. */
10294 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10295 type = v8hi_ftype_v4si_v4si;
10296
10297 /* vchar, vshort, vshort. */
10298 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10299 type = v16qi_ftype_v8hi_v8hi;
10300
10301 /* vint, vchar, vint. */
10302 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10303 type = v4si_ftype_v16qi_v4si;
10304
fa066a23
AH
10305 /* vint, vchar, vchar. */
10306 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10307 type = v4si_ftype_v16qi_v16qi;
10308
0ac081f6
AH
10309 /* vint, vshort, vint. */
10310 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10311 type = v4si_ftype_v8hi_v4si;
f676971a 10312
a7b376ee 10313 /* vint, vint, 5-bit literal. */
2212663f 10314 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10315 type = v4si_ftype_v4si_int;
f676971a 10316
a7b376ee 10317 /* vshort, vshort, 5-bit literal. */
2212663f 10318 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 10319 type = v8hi_ftype_v8hi_int;
f676971a 10320
a7b376ee 10321 /* vchar, vchar, 5-bit literal. */
2212663f 10322 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 10323 type = v16qi_ftype_v16qi_int;
0ac081f6 10324
a7b376ee 10325 /* vfloat, vint, 5-bit literal. */
617e0e1d 10326 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10327 type = v4sf_ftype_v4si_int;
f676971a 10328
a7b376ee 10329 /* vint, vfloat, 5-bit literal. */
617e0e1d 10330 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 10331 type = v4si_ftype_v4sf_int;
617e0e1d 10332
a3170dc6
AH
10333 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10334 type = v2si_ftype_int_int;
10335
10336 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10337 type = v2si_ftype_v2si_char;
10338
10339 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10340 type = v2si_ftype_int_char;
10341
37409796 10342 else
0ac081f6 10343 {
37409796
NS
10344 /* int, x, x. */
10345 gcc_assert (mode0 == SImode);
0ac081f6
AH
10346 switch (mode1)
10347 {
10348 case V4SImode:
10349 type = int_ftype_v4si_v4si;
10350 break;
10351 case V4SFmode:
10352 type = int_ftype_v4sf_v4sf;
10353 break;
10354 case V16QImode:
10355 type = int_ftype_v16qi_v16qi;
10356 break;
10357 case V8HImode:
10358 type = int_ftype_v8hi_v8hi;
10359 break;
10360 default:
37409796 10361 gcc_unreachable ();
0ac081f6
AH
10362 }
10363 }
10364
2212663f
DB
10365 def_builtin (d->mask, d->name, type, d->code);
10366 }
24408032 10367
2212663f
DB
10368 /* Add the simple unary operators. */
10369 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10370 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10371 {
10372 enum machine_mode mode0, mode1;
10373 tree type;
58646b77
PB
10374 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10375 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10376
10377 if (is_overloaded)
10378 {
10379 mode0 = VOIDmode;
10380 mode1 = VOIDmode;
10381 }
10382 else
10383 {
10384 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10385 continue;
bb8df8a6 10386
58646b77
PB
10387 mode0 = insn_data[d->icode].operand[0].mode;
10388 mode1 = insn_data[d->icode].operand[1].mode;
10389 }
2212663f
DB
10390
10391 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10392 type = v4si_ftype_int;
2212663f 10393 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10394 type = v8hi_ftype_int;
2212663f 10395 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10396 type = v16qi_ftype_int;
58646b77
PB
10397 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10398 type = opaque_ftype_opaque;
617e0e1d
DB
10399 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10400 type = v4sf_ftype_v4sf;
20e26713
AH
10401 else if (mode0 == V8HImode && mode1 == V16QImode)
10402 type = v8hi_ftype_v16qi;
10403 else if (mode0 == V4SImode && mode1 == V8HImode)
10404 type = v4si_ftype_v8hi;
a3170dc6
AH
10405 else if (mode0 == V2SImode && mode1 == V2SImode)
10406 type = v2si_ftype_v2si;
10407 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10408 {
10409 if (TARGET_PAIRED_FLOAT)
10410 type = v2sf_ftype_v2sf;
10411 else
10412 type = v2sf_ftype_v2sf_spe;
10413 }
a3170dc6
AH
10414 else if (mode0 == V2SFmode && mode1 == V2SImode)
10415 type = v2sf_ftype_v2si;
10416 else if (mode0 == V2SImode && mode1 == V2SFmode)
10417 type = v2si_ftype_v2sf;
10418 else if (mode0 == V2SImode && mode1 == QImode)
10419 type = v2si_ftype_char;
2212663f 10420 else
37409796 10421 gcc_unreachable ();
2212663f 10422
0ac081f6
AH
10423 def_builtin (d->mask, d->name, type, d->code);
10424 }
10425}
10426
c15c90bb
ZW
10427static void
10428rs6000_init_libfuncs (void)
10429{
602ea4d3
JJ
10430 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10431 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10432 {
602ea4d3
JJ
10433 /* AIX library routines for float->int conversion. */
10434 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10435 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10436 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10437 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10438 }
c15c90bb 10439
602ea4d3 10440 if (!TARGET_IEEEQUAD)
98c41d98 10441 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
10442 if (!TARGET_XL_COMPAT)
10443 {
10444 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10445 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10446 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10447 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 10448
17caeff2 10449 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10450 {
10451 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10452 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10453 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10454 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10455 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10456 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10457 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10458
10459 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10460 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10461 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10462 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10463 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10464 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10465 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10466 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10467 }
b26941b4
JM
10468
10469 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10470 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10471 }
10472 else
10473 {
10474 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10475 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10476 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10477 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10478 }
c9034561 10479 else
c15c90bb 10480 {
c9034561 10481 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10482
10483 set_optab_libfunc (add_optab, TFmode, "_q_add");
10484 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10485 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10486 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10487 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10488 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10489 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10490
c9034561
ZW
10491 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10492 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10493 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10494 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10495 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10496 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10497
85363ca0
ZW
10498 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10499 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10500 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10501 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10502 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10503 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10504 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10505 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10506 }
10507}
fba73eb1
DE
10508
10509\f
10510/* Expand a block clear operation, and return 1 if successful. Return 0
10511 if we should let the compiler generate normal code.
10512
10513 operands[0] is the destination
10514 operands[1] is the length
57e84f18 10515 operands[3] is the alignment */
fba73eb1
DE
10516
10517int
10518expand_block_clear (rtx operands[])
10519{
10520 rtx orig_dest = operands[0];
10521 rtx bytes_rtx = operands[1];
57e84f18 10522 rtx align_rtx = operands[3];
5514620a
GK
10523 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10524 HOST_WIDE_INT align;
10525 HOST_WIDE_INT bytes;
fba73eb1
DE
10526 int offset;
10527 int clear_bytes;
5514620a 10528 int clear_step;
fba73eb1
DE
10529
 10530 /* If this is not a fixed size clear, just call memset */
10531 if (! constp)
10532 return 0;
10533
37409796
NS
10534 /* This must be a fixed size alignment */
10535 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10536 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10537
10538 /* Anything to clear? */
10539 bytes = INTVAL (bytes_rtx);
10540 if (bytes <= 0)
10541 return 1;
10542
5514620a
GK
10543 /* Use the builtin memset after a point, to avoid huge code bloat.
10544 When optimize_size, avoid any significant code bloat; calling
10545 memset is about 4 instructions, so allow for one instruction to
10546 load zero and three to do clearing. */
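 /* For example, with only word stores available (clear_step == 4 below),
    clears of up to 12 bytes are expanded inline when optimizing for size
    and up to 32 bytes otherwise; larger clears fall back to memset.  */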
10547 if (TARGET_ALTIVEC && align >= 128)
10548 clear_step = 16;
10549 else if (TARGET_POWERPC64 && align >= 32)
10550 clear_step = 8;
21d818ff
NF
10551 else if (TARGET_SPE && align >= 64)
10552 clear_step = 8;
5514620a
GK
10553 else
10554 clear_step = 4;
fba73eb1 10555
5514620a
GK
10556 if (optimize_size && bytes > 3 * clear_step)
10557 return 0;
10558 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10559 return 0;
10560
10561 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10562 {
fba73eb1
DE
10563 enum machine_mode mode = BLKmode;
10564 rtx dest;
f676971a 10565
5514620a
GK
10566 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10567 {
10568 clear_bytes = 16;
10569 mode = V4SImode;
10570 }
21d818ff
NF
10571 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10572 {
10573 clear_bytes = 8;
10574 mode = V2SImode;
10575 }
5514620a 10576 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10577 /* 64-bit loads and stores require word-aligned
10578 displacements. */
10579 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10580 {
10581 clear_bytes = 8;
10582 mode = DImode;
fba73eb1 10583 }
5514620a 10584 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
10585 { /* move 4 bytes */
10586 clear_bytes = 4;
10587 mode = SImode;
fba73eb1 10588 }
ec53fc93 10589 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
10590 { /* move 2 bytes */
10591 clear_bytes = 2;
10592 mode = HImode;
fba73eb1
DE
10593 }
10594 else /* move 1 byte at a time */
10595 {
10596 clear_bytes = 1;
10597 mode = QImode;
fba73eb1 10598 }
f676971a 10599
fba73eb1 10600 dest = adjust_address (orig_dest, mode, offset);
f676971a 10601
5514620a 10602 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10603 }
10604
10605 return 1;
10606}
10607
35aff10b 10608\f
7e69e155
MM
10609/* Expand a block move operation, and return 1 if successful. Return 0
10610 if we should let the compiler generate normal code.
10611
10612 operands[0] is the destination
10613 operands[1] is the source
10614 operands[2] is the length
10615 operands[3] is the alignment */
10616
3933e0e1
MM
10617#define MAX_MOVE_REG 4
10618
7e69e155 10619int
a2369ed3 10620expand_block_move (rtx operands[])
7e69e155 10621{
b6c9286a
MM
10622 rtx orig_dest = operands[0];
10623 rtx orig_src = operands[1];
7e69e155 10624 rtx bytes_rtx = operands[2];
7e69e155 10625 rtx align_rtx = operands[3];
3933e0e1 10626 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10627 int align;
3933e0e1
MM
10628 int bytes;
10629 int offset;
7e69e155 10630 int move_bytes;
cabfd258
GK
10631 rtx stores[MAX_MOVE_REG];
10632 int num_reg = 0;
7e69e155 10633
3933e0e1 10634 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 10635 if (! constp)
3933e0e1
MM
10636 return 0;
10637
37409796
NS
10638 /* This must be a fixed size alignment */
10639 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10640 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10641
7e69e155 10642 /* Anything to move? */
3933e0e1
MM
10643 bytes = INTVAL (bytes_rtx);
10644 if (bytes <= 0)
7e69e155
MM
10645 return 1;
10646
ea9982a8 10647 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10648 reg_parm_stack_space. */
ea9982a8 10649 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10650 return 0;
10651
cabfd258 10652 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10653 {
cabfd258 10654 union {
70128ad9 10655 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10656 rtx (*mov) (rtx, rtx);
cabfd258
GK
10657 } gen_func;
10658 enum machine_mode mode = BLKmode;
10659 rtx src, dest;
f676971a 10660
5514620a
GK
10661 /* Altivec first, since it will be faster than a string move
10662 when it applies, and usually not significantly larger. */
10663 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10664 {
10665 move_bytes = 16;
10666 mode = V4SImode;
10667 gen_func.mov = gen_movv4si;
10668 }
21d818ff
NF
10669 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10670 {
10671 move_bytes = 8;
10672 mode = V2SImode;
10673 gen_func.mov = gen_movv2si;
10674 }
5514620a 10675 else if (TARGET_STRING
cabfd258
GK
10676 && bytes > 24 /* move up to 32 bytes at a time */
10677 && ! fixed_regs[5]
10678 && ! fixed_regs[6]
10679 && ! fixed_regs[7]
10680 && ! fixed_regs[8]
10681 && ! fixed_regs[9]
10682 && ! fixed_regs[10]
10683 && ! fixed_regs[11]
10684 && ! fixed_regs[12])
7e69e155 10685 {
cabfd258 10686 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 10687 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
10688 }
10689 else if (TARGET_STRING
10690 && bytes > 16 /* move up to 24 bytes at a time */
10691 && ! fixed_regs[5]
10692 && ! fixed_regs[6]
10693 && ! fixed_regs[7]
10694 && ! fixed_regs[8]
10695 && ! fixed_regs[9]
10696 && ! fixed_regs[10])
10697 {
10698 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 10699 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
10700 }
10701 else if (TARGET_STRING
10702 && bytes > 8 /* move up to 16 bytes at a time */
10703 && ! fixed_regs[5]
10704 && ! fixed_regs[6]
10705 && ! fixed_regs[7]
10706 && ! fixed_regs[8])
10707 {
10708 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 10709 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
10710 }
10711 else if (bytes >= 8 && TARGET_POWERPC64
10712 /* 64-bit loads and stores require word-aligned
10713 displacements. */
fba73eb1 10714 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
10715 {
10716 move_bytes = 8;
10717 mode = DImode;
10718 gen_func.mov = gen_movdi;
10719 }
10720 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
10721 { /* move up to 8 bytes at a time */
10722 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 10723 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 10724 }
cd7d9ca4 10725 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
10726 { /* move 4 bytes */
10727 move_bytes = 4;
10728 mode = SImode;
10729 gen_func.mov = gen_movsi;
10730 }
ec53fc93 10731 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
10732 { /* move 2 bytes */
10733 move_bytes = 2;
10734 mode = HImode;
10735 gen_func.mov = gen_movhi;
10736 }
10737 else if (TARGET_STRING && bytes > 1)
10738 { /* move up to 4 bytes at a time */
10739 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 10740 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
10741 }
10742 else /* move 1 byte at a time */
10743 {
10744 move_bytes = 1;
10745 mode = QImode;
10746 gen_func.mov = gen_movqi;
10747 }
f676971a 10748
cabfd258
GK
10749 src = adjust_address (orig_src, mode, offset);
10750 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
10751
10752 if (mode != BLKmode)
cabfd258
GK
10753 {
10754 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 10755
cabfd258
GK
10756 emit_insn ((*gen_func.mov) (tmp_reg, src));
10757 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 10758 }
3933e0e1 10759
cabfd258
GK
10760 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
10761 {
10762 int i;
10763 for (i = 0; i < num_reg; i++)
10764 emit_insn (stores[i]);
10765 num_reg = 0;
10766 }
35aff10b 10767
cabfd258 10768 if (mode == BLKmode)
7e69e155 10769 {
70128ad9 10770 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
10771 patterns require zero offset. */
10772 if (!REG_P (XEXP (src, 0)))
b6c9286a 10773 {
cabfd258
GK
10774 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
10775 src = replace_equiv_address (src, src_reg);
b6c9286a 10776 }
cabfd258 10777 set_mem_size (src, GEN_INT (move_bytes));
f676971a 10778
cabfd258 10779 if (!REG_P (XEXP (dest, 0)))
3933e0e1 10780 {
cabfd258
GK
10781 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
10782 dest = replace_equiv_address (dest, dest_reg);
7e69e155 10783 }
cabfd258 10784 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 10785
70128ad9 10786 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
10787 GEN_INT (move_bytes & 31),
10788 align_rtx));
7e69e155 10789 }
7e69e155
MM
10790 }
10791
10792 return 1;
10793}
10794
d62294f5 10795\f
9caa3eb2
DE
10796/* Return a string to perform a load_multiple operation.
10797 operands[0] is the vector.
10798 operands[1] is the source address.
10799 operands[2] is the first destination register. */
10800
10801const char *
a2369ed3 10802rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
10803{
10804 /* We have to handle the case where the pseudo used to contain the address
10805 is assigned to one of the output registers. */
10806 int i, j;
10807 int words = XVECLEN (operands[0], 0);
10808 rtx xop[10];
10809
10810 if (XVECLEN (operands[0], 0) == 1)
10811 return "{l|lwz} %2,0(%1)";
10812
10813 for (i = 0; i < words; i++)
10814 if (refers_to_regno_p (REGNO (operands[2]) + i,
10815 REGNO (operands[2]) + i + 1, operands[1], 0))
10816 {
10817 if (i == words-1)
10818 {
10819 xop[0] = GEN_INT (4 * (words-1));
10820 xop[1] = operands[1];
10821 xop[2] = operands[2];
10822 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
10823 return "";
10824 }
10825 else if (i == 0)
10826 {
10827 xop[0] = GEN_INT (4 * (words-1));
10828 xop[1] = operands[1];
10829 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
10830 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
10831 return "";
10832 }
10833 else
10834 {
10835 for (j = 0; j < words; j++)
10836 if (j != i)
10837 {
10838 xop[0] = GEN_INT (j * 4);
10839 xop[1] = operands[1];
10840 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
10841 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
10842 }
10843 xop[0] = GEN_INT (i * 4);
10844 xop[1] = operands[1];
10845 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
10846 return "";
10847 }
10848 }
10849
10850 return "{lsi|lswi} %2,%1,%N0";
10851}
10852
9878760c 10853\f
a4f6c312
SS
10854/* A validation routine: say whether CODE, a condition code, and MODE
10855 match. The other alternatives either don't make sense or should
10856 never be generated. */
39a10a29 10857
48d72335 10858void
a2369ed3 10859validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 10860{
37409796
NS
10861 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
10862 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
10863 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
10864
10865 /* These don't make sense. */
37409796
NS
10866 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
10867 || mode != CCUNSmode);
39a10a29 10868
37409796
NS
10869 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
10870 || mode == CCUNSmode);
39a10a29 10871
37409796
NS
10872 gcc_assert (mode == CCFPmode
10873 || (code != ORDERED && code != UNORDERED
10874 && code != UNEQ && code != LTGT
10875 && code != UNGT && code != UNLT
10876 && code != UNGE && code != UNLE));
f676971a
EC
10877
10878 /* These should never be generated except for
bc9ec0e0 10879 flag_finite_math_only. */
37409796
NS
10880 gcc_assert (mode != CCFPmode
10881 || flag_finite_math_only
10882 || (code != LE && code != GE
10883 && code != UNEQ && code != LTGT
10884 && code != UNGT && code != UNLT));
39a10a29
GK
10885
10886 /* These are invalid; the information is not there. */
37409796 10887 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
10888}
10889
9878760c
RK
10890\f
 10891/* Return 1 if ANDOP is a mask that has no bits set that are not in the
10892 mask required to convert the result of a rotate insn into a shift
b1765bde 10893 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
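/* For example, with SHIFTOP == 4 an ANDOP of 0xffffff00 qualifies, while
   0xff does not, because the latter has bits set in the low four positions
   that a shift left by 4 would clear anyway.  */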
9878760c
RK
10894
10895int
a2369ed3 10896includes_lshift_p (rtx shiftop, rtx andop)
9878760c 10897{
e2c953b6
DE
10898 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
10899
10900 shift_mask <<= INTVAL (shiftop);
9878760c 10901
b1765bde 10902 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
10903}
10904
10905/* Similar, but for right shift. */
10906
10907int
a2369ed3 10908includes_rshift_p (rtx shiftop, rtx andop)
9878760c 10909{
a7653a2c 10910 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
10911
10912 shift_mask >>= INTVAL (shiftop);
10913
b1765bde 10914 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
10915}
10916
c5059423
AM
10917/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
10918 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 10919 significant 0's, then one or more 1's, then zero or more 0's. */
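/* For example, with SHIFTOP == 4 the mask 0xff0 qualifies (exactly four
   low zeros followed by a block of ones), whereas 0xff00 does not, since
   it has eight low zeros rather than exactly four.  */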
e2c953b6
DE
10920
10921int
a2369ed3 10922includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 10923{
c5059423
AM
10924 if (GET_CODE (andop) == CONST_INT)
10925 {
02071907 10926 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 10927
c5059423 10928 c = INTVAL (andop);
02071907 10929 if (c == 0 || c == ~0)
c5059423 10930 return 0;
e2c953b6 10931
02071907 10932 shift_mask = ~0;
c5059423
AM
10933 shift_mask <<= INTVAL (shiftop);
10934
b6d08ca1 10935 /* Find the least significant one bit. */
c5059423
AM
10936 lsb = c & -c;
10937
10938 /* It must coincide with the LSB of the shift mask. */
10939 if (-lsb != shift_mask)
10940 return 0;
e2c953b6 10941
c5059423
AM
10942 /* Invert to look for the next transition (if any). */
10943 c = ~c;
10944
10945 /* Remove the low group of ones (originally low group of zeros). */
10946 c &= -lsb;
10947
10948 /* Again find the lsb, and check we have all 1's above. */
10949 lsb = c & -c;
10950 return c == -lsb;
10951 }
10952 else if (GET_CODE (andop) == CONST_DOUBLE
10953 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10954 {
02071907
AM
10955 HOST_WIDE_INT low, high, lsb;
10956 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
10957
10958 low = CONST_DOUBLE_LOW (andop);
10959 if (HOST_BITS_PER_WIDE_INT < 64)
10960 high = CONST_DOUBLE_HIGH (andop);
10961
10962 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 10963 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
10964 return 0;
10965
10966 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10967 {
02071907 10968 shift_mask_high = ~0;
c5059423
AM
10969 if (INTVAL (shiftop) > 32)
10970 shift_mask_high <<= INTVAL (shiftop) - 32;
10971
10972 lsb = high & -high;
10973
10974 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
10975 return 0;
10976
10977 high = ~high;
10978 high &= -lsb;
10979
10980 lsb = high & -high;
10981 return high == -lsb;
10982 }
10983
02071907 10984 shift_mask_low = ~0;
c5059423
AM
10985 shift_mask_low <<= INTVAL (shiftop);
10986
10987 lsb = low & -low;
10988
10989 if (-lsb != shift_mask_low)
10990 return 0;
10991
10992 if (HOST_BITS_PER_WIDE_INT < 64)
10993 high = ~high;
10994 low = ~low;
10995 low &= -lsb;
10996
10997 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10998 {
10999 lsb = high & -high;
11000 return high == -lsb;
11001 }
11002
11003 lsb = low & -low;
11004 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
11005 }
11006 else
11007 return 0;
11008}
e2c953b6 11009
c5059423
AM
11010/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
11011 to perform a left shift. It must have SHIFTOP or more least
c1207243 11012 significant 0's, with the remainder of the word 1's. */
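/* For example, with SHIFTOP == 4 a mask that is all ones except for its
   eight low bits qualifies (at least four low zeros, ones above), whereas
   an all-ones mask is rejected.  */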
e2c953b6 11013
c5059423 11014int
a2369ed3 11015includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 11016{
e2c953b6 11017 if (GET_CODE (andop) == CONST_INT)
c5059423 11018 {
02071907 11019 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 11020
02071907 11021 shift_mask = ~0;
c5059423
AM
11022 shift_mask <<= INTVAL (shiftop);
11023 c = INTVAL (andop);
11024
c1207243 11025 /* Find the least significant one bit. */
c5059423
AM
11026 lsb = c & -c;
11027
11028 /* It must be covered by the shift mask.
a4f6c312 11029 This test also rejects c == 0. */
c5059423
AM
11030 if ((lsb & shift_mask) == 0)
11031 return 0;
11032
11033 /* Check we have all 1's above the transition, and reject all 1's. */
11034 return c == -lsb && lsb != 1;
11035 }
11036 else if (GET_CODE (andop) == CONST_DOUBLE
11037 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11038 {
02071907 11039 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
11040
11041 low = CONST_DOUBLE_LOW (andop);
11042
11043 if (HOST_BITS_PER_WIDE_INT < 64)
11044 {
02071907 11045 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
11046
11047 high = CONST_DOUBLE_HIGH (andop);
11048
11049 if (low == 0)
11050 {
02071907 11051 shift_mask_high = ~0;
c5059423
AM
11052 if (INTVAL (shiftop) > 32)
11053 shift_mask_high <<= INTVAL (shiftop) - 32;
11054
11055 lsb = high & -high;
11056
11057 if ((lsb & shift_mask_high) == 0)
11058 return 0;
11059
11060 return high == -lsb;
11061 }
11062 if (high != ~0)
11063 return 0;
11064 }
11065
02071907 11066 shift_mask_low = ~0;
c5059423
AM
11067 shift_mask_low <<= INTVAL (shiftop);
11068
11069 lsb = low & -low;
11070
11071 if ((lsb & shift_mask_low) == 0)
11072 return 0;
11073
11074 return low == -lsb && lsb != 1;
11075 }
e2c953b6 11076 else
c5059423 11077 return 0;
9878760c 11078}
35068b43 11079
11ac38b2
DE
 11080/* Return 1 if the operands will generate valid arguments to an rlwimi
 11081instruction for an insert with right shift in 64-bit mode.  The mask may
 11082not start on the first bit or stop on the last bit because the wrap-around
 11083effects of the instruction do not correspond to the semantics of the RTL insn. */
11084
11085int
11086insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
11087{
429ec7dc
DE
11088 if (INTVAL (startop) > 32
11089 && INTVAL (startop) < 64
11090 && INTVAL (sizeop) > 1
11091 && INTVAL (sizeop) + INTVAL (startop) < 64
11092 && INTVAL (shiftop) > 0
11093 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
11094 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
11095 return 1;
11096
11097 return 0;
11098}
11099
35068b43 11100/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 11101 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
11102
11103int
a2369ed3 11104registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
11105{
11106 /* We might have been passed a SUBREG. */
f676971a 11107 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 11108 return 0;
f676971a 11109
90f81f99
AP
11110 /* We might have been passed non floating point registers. */
11111 if (!FP_REGNO_P (REGNO (reg1))
11112 || !FP_REGNO_P (REGNO (reg2)))
11113 return 0;
35068b43
RK
11114
11115 return (REGNO (reg1) == REGNO (reg2) - 1);
11116}
11117
a4f6c312
SS
11118/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
11119 addr1 and addr2 must be in consecutive memory locations
11120 (addr2 == addr1 + 8). */
35068b43
RK
11121
11122int
90f81f99 11123mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 11124{
90f81f99 11125 rtx addr1, addr2;
bb8df8a6
EC
11126 unsigned int reg1, reg2;
11127 int offset1, offset2;
35068b43 11128
90f81f99
AP
11129 /* The mems cannot be volatile. */
11130 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
11131 return 0;
f676971a 11132
90f81f99
AP
11133 addr1 = XEXP (mem1, 0);
11134 addr2 = XEXP (mem2, 0);
11135
35068b43
RK
11136 /* Extract an offset (if used) from the first addr. */
11137 if (GET_CODE (addr1) == PLUS)
11138 {
11139 /* If not a REG, return zero. */
11140 if (GET_CODE (XEXP (addr1, 0)) != REG)
11141 return 0;
11142 else
11143 {
c4ad648e 11144 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
11145 /* The offset must be constant! */
11146 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
11147 return 0;
11148 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
11149 }
11150 }
11151 else if (GET_CODE (addr1) != REG)
11152 return 0;
11153 else
11154 {
11155 reg1 = REGNO (addr1);
11156 /* This was a simple (mem (reg)) expression. Offset is 0. */
11157 offset1 = 0;
11158 }
11159
bb8df8a6
EC
11160 /* And now for the second addr. */
11161 if (GET_CODE (addr2) == PLUS)
11162 {
11163 /* If not a REG, return zero. */
11164 if (GET_CODE (XEXP (addr2, 0)) != REG)
11165 return 0;
11166 else
11167 {
11168 reg2 = REGNO (XEXP (addr2, 0));
11169 /* The offset must be constant. */
11170 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11171 return 0;
11172 offset2 = INTVAL (XEXP (addr2, 1));
11173 }
11174 }
11175 else if (GET_CODE (addr2) != REG)
35068b43 11176 return 0;
bb8df8a6
EC
11177 else
11178 {
11179 reg2 = REGNO (addr2);
11180 /* This was a simple (mem (reg)) expression. Offset is 0. */
11181 offset2 = 0;
11182 }
35068b43 11183
bb8df8a6
EC
11184 /* Both of these must have the same base register. */
11185 if (reg1 != reg2)
35068b43
RK
11186 return 0;
11187
11188 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 11189 if (offset2 != offset1 + 8)
35068b43
RK
11190 return 0;
11191
11192 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11193 instructions. */
11194 return 1;
11195}
9878760c 11196\f
e41b2a33
PB
11197
11198rtx
11199rs6000_secondary_memory_needed_rtx (enum machine_mode mode)
11200{
11201 static bool eliminated = false;
11202 if (mode != SDmode)
11203 return assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
11204 else
11205 {
11206 rtx mem = cfun->machine->sdmode_stack_slot;
11207 gcc_assert (mem != NULL_RTX);
11208
11209 if (!eliminated)
11210 {
11211 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
11212 cfun->machine->sdmode_stack_slot = mem;
11213 eliminated = true;
11214 }
11215 return mem;
11216 }
11217}
11218
11219static tree
11220rs6000_check_sdmode (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11221{
11222 /* Don't walk into types. */
11223 if (*tp == NULL_TREE || *tp == error_mark_node || TYPE_P (*tp))
11224 {
11225 *walk_subtrees = 0;
11226 return NULL_TREE;
11227 }
11228
11229 switch (TREE_CODE (*tp))
11230 {
11231 case VAR_DECL:
11232 case PARM_DECL:
11233 case FIELD_DECL:
11234 case RESULT_DECL:
11235 case REAL_CST:
11236 if (TYPE_MODE (TREE_TYPE (*tp)) == SDmode)
11237 return *tp;
11238 break;
11239 default:
11240 break;
11241 }
11242
11243 return NULL_TREE;
11244}
11245
11246
11247/* Allocate a 64-bit stack slot to be used for copying SDmode
11248 values through if this function has any SDmode references. */
11249
11250static void
11251rs6000_alloc_sdmode_stack_slot (void)
11252{
11253 tree t;
11254 basic_block bb;
11255 block_stmt_iterator bsi;
11256
11257 gcc_assert (cfun->machine->sdmode_stack_slot == NULL_RTX);
11258
11259 FOR_EACH_BB (bb)
11260 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
11261 {
11262 tree ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
11263 rs6000_check_sdmode, NULL);
11264 if (ret)
11265 {
11266 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11267 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11268 SDmode, 0);
11269 return;
11270 }
11271 }
11272
11273 /* Check for any SDmode parameters of the function. */
11274 for (t = DECL_ARGUMENTS (cfun->decl); t; t = TREE_CHAIN (t))
11275 {
11276 if (TREE_TYPE (t) == error_mark_node)
11277 continue;
11278
11279 if (TYPE_MODE (TREE_TYPE (t)) == SDmode
11280 || TYPE_MODE (DECL_ARG_TYPE (t)) == SDmode)
11281 {
11282 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11283 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11284 SDmode, 0);
11285 return;
11286 }
11287 }
11288}
11289
11290static void
11291rs6000_instantiate_decls (void)
11292{
11293 if (cfun->machine->sdmode_stack_slot != NULL_RTX)
11294 instantiate_decl_rtl (cfun->machine->sdmode_stack_slot);
11295}
11296
9878760c
RK
11297/* Return the register class of a scratch register needed to copy IN into
11298 or out of a register in CLASS in MODE. If it can be done directly,
11299 NO_REGS is returned. */
11300
11301enum reg_class
3c4774e0
R
11302rs6000_secondary_reload_class (enum reg_class class,
11303 enum machine_mode mode ATTRIBUTE_UNUSED,
11304 rtx in)
9878760c 11305{
5accd822 11306 int regno;
9878760c 11307
ab82a49f
AP
11308 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
11309#if TARGET_MACHO
c4ad648e 11310 && MACHOPIC_INDIRECT
ab82a49f 11311#endif
c4ad648e 11312 ))
46fad5b7
DJ
11313 {
11314 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
11315 other than BASE_REGS for TARGET_ELF. So indicate that a
11316 register from BASE_REGS is needed as an intermediate
11317 register.
f676971a 11318
46fad5b7
DJ
11319 On Darwin, pic addresses require a load from memory, which
11320 needs a base register. */
11321 if (class != BASE_REGS
c4ad648e
AM
11322 && (GET_CODE (in) == SYMBOL_REF
11323 || GET_CODE (in) == HIGH
11324 || GET_CODE (in) == LABEL_REF
11325 || GET_CODE (in) == CONST))
11326 return BASE_REGS;
46fad5b7 11327 }
e7b7998a 11328
5accd822
DE
11329 if (GET_CODE (in) == REG)
11330 {
11331 regno = REGNO (in);
11332 if (regno >= FIRST_PSEUDO_REGISTER)
11333 {
11334 regno = true_regnum (in);
11335 if (regno >= FIRST_PSEUDO_REGISTER)
11336 regno = -1;
11337 }
11338 }
11339 else if (GET_CODE (in) == SUBREG)
11340 {
11341 regno = true_regnum (in);
11342 if (regno >= FIRST_PSEUDO_REGISTER)
11343 regno = -1;
11344 }
11345 else
11346 regno = -1;
11347
9878760c
RK
11348 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
11349 into anything. */
11350 if (class == GENERAL_REGS || class == BASE_REGS
11351 || (regno >= 0 && INT_REGNO_P (regno)))
11352 return NO_REGS;
11353
11354 /* Constants, memory, and FP registers can go into FP registers. */
11355 if ((regno == -1 || FP_REGNO_P (regno))
11356 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
e41b2a33 11357 return (mode != SDmode) ? NO_REGS : GENERAL_REGS;
9878760c 11358
0ac081f6
AH
11359 /* Memory, and AltiVec registers can go into AltiVec registers. */
11360 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
11361 && class == ALTIVEC_REGS)
11362 return NO_REGS;
11363
9878760c
RK
11364 /* We can copy among the CR registers. */
11365 if ((class == CR_REGS || class == CR0_REGS)
11366 && regno >= 0 && CR_REGNO_P (regno))
11367 return NO_REGS;
11368
11369 /* Otherwise, we need GENERAL_REGS. */
11370 return GENERAL_REGS;
11371}
11372\f
11373/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 11374 know this is a valid comparison.
9878760c
RK
11375
11376 SCC_P is 1 if this is for an scc. That means that %D will have been
11377 used instead of %C, so the bits will be in different places.
11378
b4ac57ab 11379 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
11380
11381int
a2369ed3 11382ccr_bit (rtx op, int scc_p)
9878760c
RK
11383{
11384 enum rtx_code code = GET_CODE (op);
11385 enum machine_mode cc_mode;
11386 int cc_regnum;
11387 int base_bit;
9ebbca7d 11388 rtx reg;
9878760c 11389
ec8e098d 11390 if (!COMPARISON_P (op))
9878760c
RK
11391 return -1;
11392
9ebbca7d
GK
11393 reg = XEXP (op, 0);
11394
37409796 11395 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
11396
11397 cc_mode = GET_MODE (reg);
11398 cc_regnum = REGNO (reg);
11399 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 11400
39a10a29 11401 validate_condition_mode (code, cc_mode);
c5defebb 11402
b7053a3f
GK
11403 /* When generating a sCOND operation, only positive conditions are
11404 allowed. */
37409796
NS
11405 gcc_assert (!scc_p
11406 || code == EQ || code == GT || code == LT || code == UNORDERED
11407 || code == GTU || code == LTU);
f676971a 11408
9878760c
RK
11409 switch (code)
11410 {
11411 case NE:
11412 return scc_p ? base_bit + 3 : base_bit + 2;
11413 case EQ:
11414 return base_bit + 2;
1c882ea4 11415 case GT: case GTU: case UNLE:
9878760c 11416 return base_bit + 1;
1c882ea4 11417 case LT: case LTU: case UNGE:
9878760c 11418 return base_bit;
1c882ea4
GK
11419 case ORDERED: case UNORDERED:
11420 return base_bit + 3;
9878760c
RK
11421
11422 case GE: case GEU:
39a10a29 11423 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
11424 unordered position. So test that bit. For integer, this is ! LT
11425 unless this is an scc insn. */
39a10a29 11426 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
11427
11428 case LE: case LEU:
39a10a29 11429 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 11430
9878760c 11431 default:
37409796 11432 gcc_unreachable ();
9878760c
RK
11433 }
11434}
1ff7789b 11435\f
8d30c4ee 11436/* Return the GOT register. */
1ff7789b 11437
9390387d 11438rtx
a2369ed3 11439rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 11440{
a4f6c312
SS
11441 /* The second flow pass currently (June 1999) can't update
11442 regs_ever_live without disturbing other parts of the compiler, so
11443 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
11444 if (!can_create_pseudo_p ()
11445 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 11446 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 11447
8d30c4ee 11448 current_function_uses_pic_offset_table = 1;
3cb999d8 11449
1ff7789b
MM
11450 return pic_offset_table_rtx;
11451}
a7df97e6 11452\f
e2500fed
GK
11453/* Function to init struct machine_function.
11454 This will be called, via a pointer variable,
11455 from push_function_context. */
a7df97e6 11456
e2500fed 11457static struct machine_function *
863d938c 11458rs6000_init_machine_status (void)
a7df97e6 11459{
e2500fed 11460 return ggc_alloc_cleared (sizeof (machine_function));
a7df97e6 11461}
9878760c 11462\f
0ba1b2ff
AM
11463/* These macros test for integers and extract the low-order bits. */
11464#define INT_P(X) \
11465((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11466 && GET_MODE (X) == VOIDmode)
11467
11468#define INT_LOWPART(X) \
11469 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11470
11471int
a2369ed3 11472extract_MB (rtx op)
0ba1b2ff
AM
11473{
11474 int i;
11475 unsigned long val = INT_LOWPART (op);
11476
11477 /* If the high bit is zero, the value is the first 1 bit we find
11478 from the left. */
11479 if ((val & 0x80000000) == 0)
11480 {
37409796 11481 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11482
11483 i = 1;
11484 while (((val <<= 1) & 0x80000000) == 0)
11485 ++i;
11486 return i;
11487 }
11488
11489 /* If the high bit is set and the low bit is not, or the mask is all
11490 1's, the value is zero. */
11491 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11492 return 0;
11493
11494 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11495 from the right. */
11496 i = 31;
11497 while (((val >>= 1) & 1) != 0)
11498 --i;
11499
11500 return i;
11501}
11502
11503int
a2369ed3 11504extract_ME (rtx op)
0ba1b2ff
AM
11505{
11506 int i;
11507 unsigned long val = INT_LOWPART (op);
11508
11509 /* If the low bit is zero, the value is the first 1 bit we find from
11510 the right. */
11511 if ((val & 1) == 0)
11512 {
37409796 11513 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11514
11515 i = 30;
11516 while (((val >>= 1) & 1) == 0)
11517 --i;
11518
11519 return i;
11520 }
11521
11522 /* If the low bit is set and the high bit is not, or the mask is all
11523 1's, the value is 31. */
11524 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11525 return 31;
11526
11527 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11528 from the left. */
11529 i = 0;
11530 while (((val <<= 1) & 0x80000000) != 0)
11531 ++i;
11532
11533 return i;
11534}
11535
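/* For example, for the wrap-free mask 0x0ffffff0, extract_MB returns 4
   (the first 1 bit counting from the left) and extract_ME returns 27
   (the last 1 bit), matching the MB and ME operands of an rlwinm that
   uses that mask.  */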
c4501e62
JJ
11536/* Locate some local-dynamic symbol still in use by this function
11537 so that we can print its name in some tls_ld pattern. */
11538
11539static const char *
863d938c 11540rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
11541{
11542 rtx insn;
11543
11544 if (cfun->machine->some_ld_name)
11545 return cfun->machine->some_ld_name;
11546
11547 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11548 if (INSN_P (insn)
11549 && for_each_rtx (&PATTERN (insn),
11550 rs6000_get_some_local_dynamic_name_1, 0))
11551 return cfun->machine->some_ld_name;
11552
37409796 11553 gcc_unreachable ();
c4501e62
JJ
11554}
11555
11556/* Helper function for rs6000_get_some_local_dynamic_name. */
11557
11558static int
a2369ed3 11559rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
11560{
11561 rtx x = *px;
11562
11563 if (GET_CODE (x) == SYMBOL_REF)
11564 {
11565 const char *str = XSTR (x, 0);
11566 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11567 {
11568 cfun->machine->some_ld_name = str;
11569 return 1;
11570 }
11571 }
11572
11573 return 0;
11574}
11575
85b776df
AM
11576/* Write out a function code label. */
11577
11578void
11579rs6000_output_function_entry (FILE *file, const char *fname)
11580{
11581 if (fname[0] != '.')
11582 {
11583 switch (DEFAULT_ABI)
11584 {
11585 default:
37409796 11586 gcc_unreachable ();
85b776df
AM
11587
11588 case ABI_AIX:
11589 if (DOT_SYMBOLS)
11590 putc ('.', file);
11591 else
11592 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11593 break;
11594
11595 case ABI_V4:
11596 case ABI_DARWIN:
11597 break;
11598 }
11599 }
11600 if (TARGET_AIX)
11601 RS6000_OUTPUT_BASENAME (file, fname);
11602 else
11603 assemble_name (file, fname);
11604}
11605
9878760c
RK
11606/* Print an operand. Recognize special options, documented below. */
11607
38c1f2d7 11608#if TARGET_ELF
d9407988 11609#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11610#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
11611#else
11612#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11613#define SMALL_DATA_REG 0
ba5e43aa
MM
11614#endif
11615
9878760c 11616void
a2369ed3 11617print_operand (FILE *file, rtx x, int code)
9878760c
RK
11618{
11619 int i;
a260abc9 11620 HOST_WIDE_INT val;
0ba1b2ff 11621 unsigned HOST_WIDE_INT uval;
9878760c
RK
11622
11623 switch (code)
11624 {
a8b3aeda 11625 case '.':
a85d226b
RK
11626 /* Write out an instruction after the call which may be replaced
11627 with glue code by the loader. This depends on the AIX version. */
11628 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
11629 return;
11630
81eace42
GK
11631 /* %a is output_address. */
11632
9854d9ed
RK
11633 case 'A':
11634 /* If X is a constant integer whose low-order 5 bits are zero,
11635 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11636 in the AIX assembler where "sri" with a zero shift count
20e26713 11637 writes a trash instruction. */
9854d9ed 11638 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11639 putc ('l', file);
9854d9ed 11640 else
76229ac8 11641 putc ('r', file);
9854d9ed
RK
11642 return;
11643
11644 case 'b':
e2c953b6
DE
11645 /* If constant, low-order 16 bits of constant, unsigned.
11646 Otherwise, write normally. */
11647 if (INT_P (x))
11648 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11649 else
11650 print_operand (file, x, 0);
cad12a8d
RK
11651 return;
11652
a260abc9
DE
11653 case 'B':
11654 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11655 for 64-bit mask direction. */
9390387d 11656 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11657 return;
a260abc9 11658
81eace42
GK
11659 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11660 output_operand. */
11661
423c1189
AH
11662 case 'c':
11663 /* X is a CR register. Print the number of the GT bit of the CR. */
11664 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
 11665 output_operand_lossage ("invalid %%c value");
11666 else
11667 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11668 return;
11669
11670 case 'D':
cef6b86c 11671 /* Like 'J' but get to the GT bit only. */
37409796 11672 gcc_assert (GET_CODE (x) == REG);
423c1189 11673
cef6b86c
EB
11674 /* Bit 1 is GT bit. */
11675 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 11676
cef6b86c
EB
11677 /* Add one for shift count in rlinm for scc. */
11678 fprintf (file, "%d", i + 1);
423c1189
AH
11679 return;
11680
9854d9ed 11681 case 'E':
39a10a29 11682 /* X is a CR register. Print the number of the EQ bit of the CR. */
9854d9ed
RK
11683 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11684 output_operand_lossage ("invalid %%E value");
78fbdbf7 11685 else
39a10a29 11686 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 11687 return;
9854d9ed
RK
11688
11689 case 'f':
11690 /* X is a CR register. Print the shift count needed to move it
11691 to the high-order four bits. */
11692 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11693 output_operand_lossage ("invalid %%f value");
11694 else
9ebbca7d 11695 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11696 return;
11697
11698 case 'F':
11699 /* Similar, but print the count for the rotate in the opposite
11700 direction. */
11701 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11702 output_operand_lossage ("invalid %%F value");
11703 else
9ebbca7d 11704 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11705 return;
11706
11707 case 'G':
11708 /* X is a constant integer. If it is negative, print "m",
43aa4e05 11709 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
11710 if (GET_CODE (x) != CONST_INT)
11711 output_operand_lossage ("invalid %%G value");
11712 else if (INTVAL (x) >= 0)
76229ac8 11713 putc ('z', file);
9854d9ed 11714 else
76229ac8 11715 putc ('m', file);
9854d9ed 11716 return;
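 /* "aze" and "ame" are the old POWER mnemonics for addze (add to zero
    extended) and addme (add to minus one extended), which add the carry
    together with 0 or -1 respectively. */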
e2c953b6 11717
9878760c 11718 case 'h':
a4f6c312
SS
11719 /* If constant, output low-order five bits. Otherwise, write
11720 normally. */
9878760c 11721 if (INT_P (x))
5f59ecb7 11722 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
11723 else
11724 print_operand (file, x, 0);
11725 return;
11726
64305719 11727 case 'H':
a4f6c312
SS
11728 /* If constant, output low-order six bits. Otherwise, write
11729 normally. */
64305719 11730 if (INT_P (x))
5f59ecb7 11731 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
11732 else
11733 print_operand (file, x, 0);
11734 return;
11735
9854d9ed
RK
11736 case 'I':
11737 /* Print `i' if this is a constant, else nothing. */
9878760c 11738 if (INT_P (x))
76229ac8 11739 putc ('i', file);
9878760c
RK
11740 return;
11741
9854d9ed
RK
11742 case 'j':
11743 /* Write the bit number in CCR for jump. */
11744 i = ccr_bit (x, 0);
11745 if (i == -1)
11746 output_operand_lossage ("invalid %%j code");
9878760c 11747 else
9854d9ed 11748 fprintf (file, "%d", i);
9878760c
RK
11749 return;
11750
9854d9ed
RK
11751 case 'J':
11752 /* Similar, but add one for shift count in rlinm for scc and pass
11753 scc flag to `ccr_bit'. */
11754 i = ccr_bit (x, 1);
11755 if (i == -1)
11756 output_operand_lossage ("invalid %%J code");
11757 else
a0466a68
RK
11758 /* If we want bit 31, write a shift count of zero, not 32. */
11759 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
11760 return;
11761
9854d9ed
RK
11762 case 'k':
11763 /* X must be a constant. Write the 1's complement of the
11764 constant. */
9878760c 11765 if (! INT_P (x))
9854d9ed 11766 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
11767 else
11768 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
11769 return;
11770
81eace42 11771 case 'K':
9ebbca7d
GK
11772 /* X must be a symbolic constant on ELF. Write an
11773 expression suitable for an 'addi' that adds in the low 16
11774 bits of the MEM. */
11775 if (GET_CODE (x) != CONST)
11776 {
11777 print_operand_address (file, x);
11778 fputs ("@l", file);
11779 }
11780 else
11781 {
11782 if (GET_CODE (XEXP (x, 0)) != PLUS
11783 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
11784 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
11785 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 11786 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
11787 print_operand_address (file, XEXP (XEXP (x, 0), 0));
11788 fputs ("@l", file);
ed8d2920
MM
11789 /* For GNU as, there must be a non-alphanumeric character
11790 between 'l' and the number. The '-' is added by
11791 print_operand() already. */
11792 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
11793 fputs ("+", file);
9ebbca7d
GK
11794 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
11795 }
81eace42
GK
11796 return;
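 /* As an illustration (hypothetical operand): for the RTL
    (const (plus (symbol_ref "x") (const_int 4))) the code above prints
    "x@l+4", the low 16 bits of the address for use in the addi of an
    addis/addi pair. */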
11797
11798 /* %l is output_asm_label. */
9ebbca7d 11799
9854d9ed
RK
11800 case 'L':
11801 /* Write second word of DImode or DFmode reference. Works on register
11802 or non-indexed memory only. */
11803 if (GET_CODE (x) == REG)
fb5c67a7 11804 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
11805 else if (GET_CODE (x) == MEM)
11806 {
11807 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 11808 we have already done it, we can just use an offset of one word. */
9854d9ed
RK
11809 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11810 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
11811 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11812 UNITS_PER_WORD));
6fb5fa3c
DB
11813 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11814 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11815 UNITS_PER_WORD));
9854d9ed 11816 else
d7624dc0
RK
11817 output_address (XEXP (adjust_address_nv (x, SImode,
11818 UNITS_PER_WORD),
11819 0));
ed8908e7 11820
ba5e43aa 11821 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11822 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11823 reg_names[SMALL_DATA_REG]);
9854d9ed 11824 }
9878760c 11825 return;
f676971a 11826
9878760c
RK
11827 case 'm':
11828 /* MB value for a mask operand. */
b1765bde 11829 if (! mask_operand (x, SImode))
9878760c
RK
11830 output_operand_lossage ("invalid %%m value");
11831
0ba1b2ff 11832 fprintf (file, "%d", extract_MB (x));
9878760c
RK
11833 return;
11834
11835 case 'M':
11836 /* ME value for a mask operand. */
b1765bde 11837 if (! mask_operand (x, SImode))
a260abc9 11838 output_operand_lossage ("invalid %%M value");
9878760c 11839
0ba1b2ff 11840 fprintf (file, "%d", extract_ME (x));
9878760c
RK
11841 return;
11842
81eace42
GK
11843 /* %n outputs the negative of its operand. */
11844
9878760c
RK
11845 case 'N':
11846 /* Write the number of elements in the vector times 4. */
11847 if (GET_CODE (x) != PARALLEL)
11848 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
11849 else
11850 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
11851 return;
11852
11853 case 'O':
11854 /* Similar, but subtract 1 first. */
11855 if (GET_CODE (x) != PARALLEL)
1427100a 11856 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
11857 else
11858 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
11859 return;
11860
9854d9ed
RK
11861 case 'p':
11862 /* X is a CONST_INT that is a power of two. Output the logarithm. */
11863 if (! INT_P (x)
2bfcf297 11864 || INT_LOWPART (x) < 0
9854d9ed
RK
11865 || (i = exact_log2 (INT_LOWPART (x))) < 0)
11866 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
11867 else
11868 fprintf (file, "%d", i);
9854d9ed
RK
11869 return;
11870
9878760c
RK
11871 case 'P':
11872 /* The operand must be an indirect memory reference. The result
8bb418a3 11873 is the register name. */
9878760c
RK
11874 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
11875 || REGNO (XEXP (x, 0)) >= 32)
11876 output_operand_lossage ("invalid %%P value");
e2c953b6 11877 else
fb5c67a7 11878 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
11879 return;
11880
dfbdccdb
GK
11881 case 'q':
11882 /* This outputs the logical code corresponding to a boolean
11883 expression. The expression may have one or both operands
39a10a29 11884 negated (if one, only the first one). For condition register
c4ad648e
AM
11885 logical operations, it will also treat the negated
11886 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 11887 {
63bc1d05 11888 const char *const *t = 0;
dfbdccdb
GK
11889 const char *s;
11890 enum rtx_code code = GET_CODE (x);
11891 static const char * const tbl[3][3] = {
11892 { "and", "andc", "nor" },
11893 { "or", "orc", "nand" },
11894 { "xor", "eqv", "xor" } };
11895
11896 if (code == AND)
11897 t = tbl[0];
11898 else if (code == IOR)
11899 t = tbl[1];
11900 else if (code == XOR)
11901 t = tbl[2];
11902 else
11903 output_operand_lossage ("invalid %%q value");
11904
11905 if (GET_CODE (XEXP (x, 0)) != NOT)
11906 s = t[0];
11907 else
11908 {
11909 if (GET_CODE (XEXP (x, 1)) == NOT)
11910 s = t[2];
11911 else
11912 s = t[1];
11913 }
f676971a 11914
dfbdccdb
GK
11915 fputs (s, file);
11916 }
11917 return;
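 /* As a worked example: for (and (not a) (not b)) the table selects "nor",
    which is correct because ~a & ~b == ~(a | b). */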
11918
2c4a9cff
DE
11919 case 'Q':
11920 if (TARGET_MFCRF)
3b6ce0af 11921 fputc (',', file);
5efb1046 11922 /* FALLTHRU */
2c4a9cff
DE
11923 else
11924 return;
11925
9854d9ed
RK
11926 case 'R':
11927 /* X is a CR register. Print the mask for `mtcrf'. */
11928 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11929 output_operand_lossage ("invalid %%R value");
11930 else
9ebbca7d 11931 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 11932 return;
9854d9ed
RK
11933
11934 case 's':
11935 /* Low 5 bits of 32 - value */
11936 if (! INT_P (x))
11937 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
11938 else
11939 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 11940 return;
9854d9ed 11941
a260abc9 11942 case 'S':
0ba1b2ff 11943 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
11944 CONST_INT 32-bit mask is considered sign-extended so any
11945 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 11946 if (! mask64_operand (x, DImode))
a260abc9
DE
11947 output_operand_lossage ("invalid %%S value");
11948
0ba1b2ff 11949 uval = INT_LOWPART (x);
a260abc9 11950
0ba1b2ff 11951 if (uval & 1) /* Clear Left */
a260abc9 11952 {
f099d360
GK
11953#if HOST_BITS_PER_WIDE_INT > 64
11954 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11955#endif
0ba1b2ff 11956 i = 64;
a260abc9 11957 }
0ba1b2ff 11958 else /* Clear Right */
a260abc9 11959 {
0ba1b2ff 11960 uval = ~uval;
f099d360
GK
11961#if HOST_BITS_PER_WIDE_INT > 64
11962 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11963#endif
0ba1b2ff 11964 i = 63;
a260abc9 11965 }
0ba1b2ff
AM
11966 while (uval != 0)
11967 --i, uval >>= 1;
37409796 11968 gcc_assert (i >= 0);
0ba1b2ff
AM
11969 fprintf (file, "%d", i);
11970 return;
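 /* Worked examples (illustrative masks): for the "clear left" mask
    0x00000000ffffffff the loop leaves i = 64 - 32 = 32, the MB field for an
    rldicl; for the "clear right" mask 0xffffffff00000000 it leaves
    i = 63 - 32 = 31, the ME field for an rldicr. */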
a260abc9 11971
a3170dc6
AH
11972 case 't':
11973 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 11974 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
11975
11976 /* Bit 3 is OV bit. */
11977 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
11978
11979 /* If we want bit 31, write a shift count of zero, not 32. */
11980 fprintf (file, "%d", i == 31 ? 0 : i + 1);
11981 return;
11982
cccf3bdc
DE
11983 case 'T':
11984 /* Print the symbolic name of a branch target register. */
1de43f85
DE
11985 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
11986 && REGNO (x) != CTR_REGNO))
cccf3bdc 11987 output_operand_lossage ("invalid %%T value");
1de43f85 11988 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
11989 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
11990 else
11991 fputs ("ctr", file);
11992 return;
11993
9854d9ed 11994 case 'u':
802a0058 11995 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
11996 if (! INT_P (x))
11997 output_operand_lossage ("invalid %%u value");
e2c953b6 11998 else
f676971a 11999 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 12000 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
12001 return;
12002
802a0058
MM
12003 case 'v':
12004 /* High-order 16 bits of constant for use in signed operand. */
12005 if (! INT_P (x))
12006 output_operand_lossage ("invalid %%v value");
e2c953b6 12007 else
134c32f6
DE
12008 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
12009 (INT_LOWPART (x) >> 16) & 0xffff);
12010 return;
802a0058 12011
9854d9ed
RK
12012 case 'U':
12013 /* Print `u' if this has an auto-increment or auto-decrement. */
12014 if (GET_CODE (x) == MEM
12015 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
12016 || GET_CODE (XEXP (x, 0)) == PRE_DEC
12017 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 12018 putc ('u', file);
9854d9ed 12019 return;
9878760c 12020
e0cd0770
JC
12021 case 'V':
12022 /* Print the trap code for this operand. */
12023 switch (GET_CODE (x))
12024 {
12025 case EQ:
12026 fputs ("eq", file); /* 4 */
12027 break;
12028 case NE:
12029 fputs ("ne", file); /* 24 */
12030 break;
12031 case LT:
12032 fputs ("lt", file); /* 16 */
12033 break;
12034 case LE:
12035 fputs ("le", file); /* 20 */
12036 break;
12037 case GT:
12038 fputs ("gt", file); /* 8 */
12039 break;
12040 case GE:
12041 fputs ("ge", file); /* 12 */
12042 break;
12043 case LTU:
12044 fputs ("llt", file); /* 2 */
12045 break;
12046 case LEU:
12047 fputs ("lle", file); /* 6 */
12048 break;
12049 case GTU:
12050 fputs ("lgt", file); /* 1 */
12051 break;
12052 case GEU:
12053 fputs ("lge", file); /* 5 */
12054 break;
12055 default:
37409796 12056 gcc_unreachable ();
e0cd0770
JC
12057 }
12058 break;
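 /* The numbers in the comments above are the corresponding TO-field
    encodings: 16 (signed LT), 8 (signed GT), 4 (EQ), 2 (logical LT) and
    1 (logical GT), OR'ed together for the compound conditions. */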
12059
9854d9ed
RK
12060 case 'w':
12061 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
12062 normally. */
12063 if (INT_P (x))
f676971a 12064 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 12065 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
12066 else
12067 print_operand (file, x, 0);
9878760c
RK
12068 return;
12069
9854d9ed 12070 case 'W':
e2c953b6 12071 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
12072 val = (GET_CODE (x) == CONST_INT
12073 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
12074
12075 if (val < 0)
12076 i = -1;
9854d9ed 12077 else
e2c953b6
DE
12078 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
12079 if ((val <<= 1) < 0)
12080 break;
12081
12082#if HOST_BITS_PER_WIDE_INT == 32
12083 if (GET_CODE (x) == CONST_INT && i >= 0)
12084 i += 32; /* zero-extend high-part was all 0's */
12085 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
12086 {
12087 val = CONST_DOUBLE_LOW (x);
12088
37409796
NS
12089 gcc_assert (val);
12090 if (val < 0)
e2c953b6
DE
12091 --i;
12092 else
12093 for ( ; i < 64; i++)
12094 if ((val <<= 1) < 0)
12095 break;
12096 }
12097#endif
12098
12099 fprintf (file, "%d", i + 1);
9854d9ed 12100 return;
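 /* In other words, the value printed is the number of leading zero bits in
    the 64-bit constant. For example, on a 64-bit host a CONST_INT of
    0x00000000ffff0000 has 32 leading zeros, so %W prints 32. */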
9878760c 12101
9854d9ed
RK
12102 case 'X':
12103 if (GET_CODE (x) == MEM
6fb5fa3c
DB
12104 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
12105 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
12106 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 12107 putc ('x', file);
9854d9ed 12108 return;
9878760c 12109
9854d9ed
RK
12110 case 'Y':
12111 /* Like 'L', for third word of TImode */
12112 if (GET_CODE (x) == REG)
fb5c67a7 12113 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 12114 else if (GET_CODE (x) == MEM)
9878760c 12115 {
9854d9ed
RK
12116 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12117 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12118 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
12119 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12120 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 12121 else
d7624dc0 12122 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 12123 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12124 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12125 reg_names[SMALL_DATA_REG]);
9878760c
RK
12126 }
12127 return;
f676971a 12128
9878760c 12129 case 'z':
b4ac57ab
RS
12130 /* X is a SYMBOL_REF. Write out the name preceded by a
12131 period and without any trailing data in brackets. Used for function
4d30c363
MM
12132 names. If we are configured for System V (or the embedded ABI) on
12133 the PowerPC, do not emit the period, since those systems do not use
12134 TOCs and the like. */
37409796 12135 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 12136
c4ad648e
AM
12137 /* Mark the decl as referenced so that cgraph will output the
12138 function. */
9bf6462a 12139 if (SYMBOL_REF_DECL (x))
c4ad648e 12140 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 12141
85b776df 12142 /* For macho, check to see if we need a stub. */
f9da97f0
AP
12143 if (TARGET_MACHO)
12144 {
12145 const char *name = XSTR (x, 0);
a031e781 12146#if TARGET_MACHO
3b48085e 12147 if (MACHOPIC_INDIRECT
11abc112
MM
12148 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
12149 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
12150#endif
12151 assemble_name (file, name);
12152 }
85b776df 12153 else if (!DOT_SYMBOLS)
9739c90c 12154 assemble_name (file, XSTR (x, 0));
85b776df
AM
12155 else
12156 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
12157 return;
12158
9854d9ed
RK
12159 case 'Z':
12160 /* Like 'L', for last word of TImode. */
12161 if (GET_CODE (x) == REG)
fb5c67a7 12162 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
12163 else if (GET_CODE (x) == MEM)
12164 {
12165 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12166 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12167 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
12168 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12169 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 12170 else
d7624dc0 12171 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 12172 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12173 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12174 reg_names[SMALL_DATA_REG]);
9854d9ed 12175 }
5c23c401 12176 return;
0ac081f6 12177
a3170dc6 12178 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
12179 case 'y':
12180 {
12181 rtx tmp;
12182
37409796 12183 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
12184
12185 tmp = XEXP (x, 0);
12186
90d3ff1c 12187 /* Ugly hack because %y is overloaded. */
8ef65e3d 12188 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
12189 && (GET_MODE_SIZE (GET_MODE (x)) == 8
12190 || GET_MODE (x) == TFmode
12191 || GET_MODE (x) == TImode))
a3170dc6
AH
12192 {
12193 /* Handle [reg]. */
12194 if (GET_CODE (tmp) == REG)
12195 {
12196 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
12197 break;
12198 }
12199 /* Handle [reg+UIMM]. */
12200 else if (GET_CODE (tmp) == PLUS &&
12201 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
12202 {
12203 int x;
12204
37409796 12205 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
12206
12207 x = INTVAL (XEXP (tmp, 1));
12208 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
12209 break;
12210 }
12211
12212 /* Fall through. Must be [reg+reg]. */
12213 }
850e8d3d
DN
12214 if (TARGET_ALTIVEC
12215 && GET_CODE (tmp) == AND
12216 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
12217 && INTVAL (XEXP (tmp, 1)) == -16)
12218 tmp = XEXP (tmp, 0);
0ac081f6 12219 if (GET_CODE (tmp) == REG)
c62f2db5 12220 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 12221 else
0ac081f6 12222 {
37409796 12223 gcc_assert (GET_CODE (tmp) == PLUS
9024f4b8
AM
12224 && REG_P (XEXP (tmp, 0))
12225 && REG_P (XEXP (tmp, 1)));
bb8df8a6 12226
0ac081f6
AH
12227 if (REGNO (XEXP (tmp, 0)) == 0)
12228 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
12229 reg_names[ REGNO (XEXP (tmp, 0)) ]);
12230 else
12231 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
12232 reg_names[ REGNO (XEXP (tmp, 1)) ]);
12233 }
0ac081f6
AH
12234 break;
12235 }
f676971a 12236
9878760c
RK
12237 case 0:
12238 if (GET_CODE (x) == REG)
12239 fprintf (file, "%s", reg_names[REGNO (x)]);
12240 else if (GET_CODE (x) == MEM)
12241 {
12242 /* We need to handle PRE_INC and PRE_DEC here, since we need to
12243 know the width from the mode. */
12244 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
12245 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
12246 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 12247 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
12248 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
12249 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
12250 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12251 output_address (XEXP (XEXP (x, 0), 1));
9878760c 12252 else
a54d04b7 12253 output_address (XEXP (x, 0));
9878760c
RK
12254 }
12255 else
a54d04b7 12256 output_addr_const (file, x);
a85d226b 12257 return;
9878760c 12258
c4501e62
JJ
12259 case '&':
12260 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12261 return;
12262
9878760c
RK
12263 default:
12264 output_operand_lossage ("invalid %%xn code");
12265 }
12266}
12267\f
12268/* Print the address of an operand. */
12269
12270void
a2369ed3 12271print_operand_address (FILE *file, rtx x)
9878760c
RK
12272{
12273 if (GET_CODE (x) == REG)
4697a36c 12274 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
12275 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12276 || GET_CODE (x) == LABEL_REF)
9878760c
RK
12277 {
12278 output_addr_const (file, x);
ba5e43aa 12279 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12280 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12281 reg_names[SMALL_DATA_REG]);
37409796
NS
12282 else
12283 gcc_assert (!TARGET_TOC);
9878760c
RK
12284 }
12285 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12286 {
9024f4b8 12287 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 12288 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
12289 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12290 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 12291 else
4697a36c
MM
12292 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12293 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
12294 }
12295 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
12296 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
12297 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
12298#if TARGET_ELF
12299 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12300 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
12301 {
12302 output_addr_const (file, XEXP (x, 1));
12303 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12304 }
c859cda6
DJ
12305#endif
12306#if TARGET_MACHO
12307 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12308 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
12309 {
12310 fprintf (file, "lo16(");
12311 output_addr_const (file, XEXP (x, 1));
12312 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12313 }
3cb999d8 12314#endif
4d588c14 12315 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 12316 {
2bfcf297 12317 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 12318 {
2bfcf297
DB
12319 rtx contains_minus = XEXP (x, 1);
12320 rtx minus, symref;
12321 const char *name;
f676971a 12322
9ebbca7d 12323 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 12324 turn it into (sym) for output_addr_const. */
9ebbca7d
GK
12325 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
12326 contains_minus = XEXP (contains_minus, 0);
12327
2bfcf297
DB
12328 minus = XEXP (contains_minus, 0);
12329 symref = XEXP (minus, 0);
12330 XEXP (contains_minus, 0) = symref;
12331 if (TARGET_ELF)
12332 {
12333 char *newname;
12334
12335 name = XSTR (symref, 0);
12336 newname = alloca (strlen (name) + sizeof ("@toc"));
12337 strcpy (newname, name);
12338 strcat (newname, "@toc");
12339 XSTR (symref, 0) = newname;
12340 }
12341 output_addr_const (file, XEXP (x, 1));
12342 if (TARGET_ELF)
12343 XSTR (symref, 0) = name;
9ebbca7d
GK
12344 XEXP (contains_minus, 0) = minus;
12345 }
12346 else
12347 output_addr_const (file, XEXP (x, 1));
12348
12349 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
12350 }
9878760c 12351 else
37409796 12352 gcc_unreachable ();
9878760c
RK
12353}
12354\f
88cad84b 12355/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
12356 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
12357 is defined. It also needs to handle DI-mode objects on 64-bit
12358 targets. */
12359
12360static bool
a2369ed3 12361rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 12362{
f4f4921e 12363#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 12364 /* Special handling for SI values. */
84dcde01 12365 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 12366 {
301d03af 12367 static int recurse = 0;
f676971a 12368
301d03af
RS
12369 /* For -mrelocatable, we mark all addresses that need to be fixed up
12370 in the .fixup section. */
12371 if (TARGET_RELOCATABLE
d6b5193b
RS
12372 && in_section != toc_section
12373 && in_section != text_section
4325ca90 12374 && !unlikely_text_section_p (in_section)
301d03af
RS
12375 && !recurse
12376 && GET_CODE (x) != CONST_INT
12377 && GET_CODE (x) != CONST_DOUBLE
12378 && CONSTANT_P (x))
12379 {
12380 char buf[256];
12381
12382 recurse = 1;
12383 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12384 fixuplabelno++;
12385 ASM_OUTPUT_LABEL (asm_out_file, buf);
12386 fprintf (asm_out_file, "\t.long\t(");
12387 output_addr_const (asm_out_file, x);
12388 fprintf (asm_out_file, ")@fixup\n");
12389 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12390 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12391 fprintf (asm_out_file, "\t.long\t");
12392 assemble_name (asm_out_file, buf);
12393 fprintf (asm_out_file, "\n\t.previous\n");
12394 recurse = 0;
12395 return true;
12396 }
12397 /* Remove initial .'s to turn a -mcall-aixdesc function
12398 address into the address of the descriptor, not the function
12399 itself. */
12400 else if (GET_CODE (x) == SYMBOL_REF
12401 && XSTR (x, 0)[0] == '.'
12402 && DEFAULT_ABI == ABI_AIX)
12403 {
12404 const char *name = XSTR (x, 0);
12405 while (*name == '.')
12406 name++;
12407
12408 fprintf (asm_out_file, "\t.long\t%s\n", name);
12409 return true;
12410 }
12411 }
f4f4921e 12412#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
12413 return default_assemble_integer (x, size, aligned_p);
12414}
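/* Note on the -mrelocatable handling above: each relocatable address word
   is given an internal label, and a pointer to that label is recorded in
   the ".fixup" section so that startup code can adjust the word if the
   image is loaded at an address other than the one it was linked for; the
   exact directives emitted depend on the target's ASM_* macros. */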
93638d7a
AM
12415
12416#ifdef HAVE_GAS_HIDDEN
12417/* Emit an assembler directive to set symbol visibility for DECL to
12418 VISIBILITY_TYPE. */
12419
5add3202 12420static void
a2369ed3 12421rs6000_assemble_visibility (tree decl, int vis)
93638d7a 12422{
93638d7a
AM
12423 /* Functions need to have their entry point symbol visibility set as
12424 well as their descriptor symbol visibility. */
85b776df
AM
12425 if (DEFAULT_ABI == ABI_AIX
12426 && DOT_SYMBOLS
12427 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 12428 {
25fdb4dc 12429 static const char * const visibility_types[] = {
c4ad648e 12430 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
12431 };
12432
12433 const char *name, *type;
93638d7a
AM
12434
12435 name = ((* targetm.strip_name_encoding)
12436 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 12437 type = visibility_types[vis];
93638d7a 12438
25fdb4dc
RH
12439 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12440 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 12441 }
25fdb4dc
RH
12442 else
12443 default_assemble_visibility (decl, vis);
93638d7a
AM
12444}
12445#endif
301d03af 12446\f
39a10a29 12447enum rtx_code
a2369ed3 12448rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
12449{
12450 /* Reversal of FP compares takes care -- an ordered compare
12451 becomes an unordered compare and vice versa. */
f676971a 12452 if (mode == CCFPmode
bc9ec0e0
GK
12453 && (!flag_finite_math_only
12454 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12455 || code == UNEQ || code == LTGT))
bab6226b 12456 return reverse_condition_maybe_unordered (code);
39a10a29 12457 else
bab6226b 12458 return reverse_condition (code);
39a10a29
GK
12459}
12460
39a10a29
GK
12461/* Generate a compare for CODE. Return a brand-new rtx that
12462 represents the result of the compare. */
a4f6c312 12463
39a10a29 12464static rtx
a2369ed3 12465rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
12466{
12467 enum machine_mode comp_mode;
12468 rtx compare_result;
12469
12470 if (rs6000_compare_fp_p)
12471 comp_mode = CCFPmode;
12472 else if (code == GTU || code == LTU
c4ad648e 12473 || code == GEU || code == LEU)
39a10a29 12474 comp_mode = CCUNSmode;
60934f9c
NS
12475 else if ((code == EQ || code == NE)
12476 && GET_CODE (rs6000_compare_op0) == SUBREG
12477 && GET_CODE (rs6000_compare_op1) == SUBREG
12478 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
12479 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
12480 /* These are unsigned values; perhaps there will be a later
12481 ordering compare that can be shared with this one.
12482 Unfortunately we cannot detect the signedness of the operands
12483 for non-subregs. */
12484 comp_mode = CCUNSmode;
39a10a29
GK
12485 else
12486 comp_mode = CCmode;
12487
12488 /* First, the compare. */
12489 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12490
cef6b86c 12491 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12492 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 12493 && rs6000_compare_fp_p)
a3170dc6 12494 {
64022b5d 12495 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
12496 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
12497
12498 if (op_mode == VOIDmode)
12499 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 12500
cef6b86c
EB
12501 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12502 This explains the following mess. */
423c1189 12503
a3170dc6
AH
12504 switch (code)
12505 {
423c1189 12506 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
12507 switch (op_mode)
12508 {
12509 case SFmode:
12510 cmp = flag_unsafe_math_optimizations
12511 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
12512 rs6000_compare_op1)
12513 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
12514 rs6000_compare_op1);
12515 break;
12516
12517 case DFmode:
12518 cmp = flag_unsafe_math_optimizations
12519 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
12520 rs6000_compare_op1)
12521 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
12522 rs6000_compare_op1);
12523 break;
12524
17caeff2
JM
12525 case TFmode:
12526 cmp = flag_unsafe_math_optimizations
12527 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
12528 rs6000_compare_op1)
12529 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
12530 rs6000_compare_op1);
12531 break;
12532
37409796
NS
12533 default:
12534 gcc_unreachable ();
12535 }
a3170dc6 12536 break;
bb8df8a6 12537
423c1189 12538 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
12539 switch (op_mode)
12540 {
12541 case SFmode:
12542 cmp = flag_unsafe_math_optimizations
12543 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
12544 rs6000_compare_op1)
12545 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
12546 rs6000_compare_op1);
12547 break;
bb8df8a6 12548
37409796
NS
12549 case DFmode:
12550 cmp = flag_unsafe_math_optimizations
12551 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
12552 rs6000_compare_op1)
12553 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
12554 rs6000_compare_op1);
12555 break;
12556
17caeff2
JM
12557 case TFmode:
12558 cmp = flag_unsafe_math_optimizations
12559 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
12560 rs6000_compare_op1)
12561 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
12562 rs6000_compare_op1);
12563 break;
12564
37409796
NS
12565 default:
12566 gcc_unreachable ();
12567 }
a3170dc6 12568 break;
bb8df8a6 12569
423c1189 12570 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
12571 switch (op_mode)
12572 {
12573 case SFmode:
12574 cmp = flag_unsafe_math_optimizations
12575 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
12576 rs6000_compare_op1)
12577 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
12578 rs6000_compare_op1);
12579 break;
bb8df8a6 12580
37409796
NS
12581 case DFmode:
12582 cmp = flag_unsafe_math_optimizations
12583 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
12584 rs6000_compare_op1)
12585 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
12586 rs6000_compare_op1);
12587 break;
12588
17caeff2
JM
12589 case TFmode:
12590 cmp = flag_unsafe_math_optimizations
12591 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
12592 rs6000_compare_op1)
12593 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
12594 rs6000_compare_op1);
12595 break;
12596
37409796
NS
12597 default:
12598 gcc_unreachable ();
12599 }
a3170dc6 12600 break;
4d4cbc0e 12601 default:
37409796 12602 gcc_unreachable ();
a3170dc6
AH
12603 }
12604
12605 /* Synthesize LE and GE from LT/GT || EQ. */
12606 if (code == LE || code == GE || code == LEU || code == GEU)
12607 {
a3170dc6
AH
12608 emit_insn (cmp);
12609
12610 switch (code)
12611 {
12612 case LE: code = LT; break;
12613 case GE: code = GT; break;
12614 case LEU: code = LT; break;
12615 case GEU: code = GT; break;
37409796 12616 default: gcc_unreachable ();
a3170dc6
AH
12617 }
12618
a3170dc6
AH
12619 compare_result2 = gen_reg_rtx (CCFPmode);
12620
12621 /* Do the EQ. */
37409796
NS
12622 switch (op_mode)
12623 {
12624 case SFmode:
12625 cmp = flag_unsafe_math_optimizations
12626 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
12627 rs6000_compare_op1)
12628 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
12629 rs6000_compare_op1);
12630 break;
12631
12632 case DFmode:
12633 cmp = flag_unsafe_math_optimizations
12634 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
12635 rs6000_compare_op1)
12636 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
12637 rs6000_compare_op1);
12638 break;
12639
17caeff2
JM
12640 case TFmode:
12641 cmp = flag_unsafe_math_optimizations
12642 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
12643 rs6000_compare_op1)
12644 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
12645 rs6000_compare_op1);
12646 break;
12647
37409796
NS
12648 default:
12649 gcc_unreachable ();
12650 }
a3170dc6
AH
12651 emit_insn (cmp);
12652
a3170dc6 12653 /* OR them together. */
64022b5d
AH
12654 or_result = gen_reg_rtx (CCFPmode);
12655 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12656 compare_result2);
a3170dc6
AH
12657 compare_result = or_result;
12658 code = EQ;
12659 }
12660 else
12661 {
a3170dc6 12662 if (code == NE || code == LTGT)
a3170dc6 12663 code = NE;
423c1189
AH
12664 else
12665 code = EQ;
a3170dc6
AH
12666 }
12667
12668 emit_insn (cmp);
12669 }
12670 else
de17c25f
DE
12671 {
12672 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12673 CLOBBERs to match cmptf_internal2 pattern. */
12674 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
12675 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 12676 && !TARGET_IEEEQUAD
de17c25f
DE
12677 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
12678 emit_insn (gen_rtx_PARALLEL (VOIDmode,
12679 gen_rtvec (9,
12680 gen_rtx_SET (VOIDmode,
12681 compare_result,
12682 gen_rtx_COMPARE (comp_mode,
12683 rs6000_compare_op0,
12684 rs6000_compare_op1)),
12685 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12686 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12687 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12688 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12689 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12690 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12691 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12692 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
12693 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
12694 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
12695 {
12696 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
12697 comp_mode = CCEQmode;
12698 compare_result = gen_reg_rtx (CCEQmode);
12699 if (TARGET_64BIT)
12700 emit_insn (gen_stack_protect_testdi (compare_result,
12701 rs6000_compare_op0, op1));
12702 else
12703 emit_insn (gen_stack_protect_testsi (compare_result,
12704 rs6000_compare_op0, op1));
12705 }
de17c25f
DE
12706 else
12707 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
12708 gen_rtx_COMPARE (comp_mode,
12709 rs6000_compare_op0,
12710 rs6000_compare_op1)));
12711 }
f676971a 12712
ca5adc63 12713 /* Some kinds of FP comparisons need an OR operation;
e7108df9 12714 under flag_finite_math_only we don't bother. */
39a10a29 12715 if (rs6000_compare_fp_p
e7108df9 12716 && !flag_finite_math_only
8ef65e3d 12717 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
12718 && (code == LE || code == GE
12719 || code == UNEQ || code == LTGT
12720 || code == UNGT || code == UNLT))
12721 {
12722 enum rtx_code or1, or2;
12723 rtx or1_rtx, or2_rtx, compare2_rtx;
12724 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 12725
39a10a29
GK
12726 switch (code)
12727 {
12728 case LE: or1 = LT; or2 = EQ; break;
12729 case GE: or1 = GT; or2 = EQ; break;
12730 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
12731 case LTGT: or1 = LT; or2 = GT; break;
12732 case UNGT: or1 = UNORDERED; or2 = GT; break;
12733 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 12734 default: gcc_unreachable ();
39a10a29
GK
12735 }
12736 validate_condition_mode (or1, comp_mode);
12737 validate_condition_mode (or2, comp_mode);
1c563bed
KH
12738 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
12739 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
12740 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
12741 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
12742 const_true_rtx);
12743 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
12744
12745 compare_result = or_result;
12746 code = EQ;
12747 }
12748
12749 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 12750
1c563bed 12751 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
12752}
12753
12754
12755/* Emit the RTL for an sCOND pattern. */
12756
12757void
a2369ed3 12758rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
12759{
12760 rtx condition_rtx;
12761 enum machine_mode op_mode;
b7053a3f 12762 enum rtx_code cond_code;
39a10a29
GK
12763
12764 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
12765 cond_code = GET_CODE (condition_rtx);
12766
8ef65e3d 12767 if (rs6000_compare_fp_p
423c1189
AH
12768 && !TARGET_FPRS && TARGET_HARD_FLOAT)
12769 {
12770 rtx t;
12771
12772 PUT_MODE (condition_rtx, SImode);
12773 t = XEXP (condition_rtx, 0);
12774
37409796 12775 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
12776
12777 if (cond_code == NE)
64022b5d 12778 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 12779
64022b5d 12780 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
12781 return;
12782 }
12783
b7053a3f
GK
12784 if (cond_code == NE
12785 || cond_code == GE || cond_code == LE
12786 || cond_code == GEU || cond_code == LEU
12787 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
12788 {
12789 rtx not_result = gen_reg_rtx (CCEQmode);
12790 rtx not_op, rev_cond_rtx;
12791 enum machine_mode cc_mode;
f676971a 12792
b7053a3f
GK
12793 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
12794
1c563bed 12795 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 12796 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
12797 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
12798 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
12799 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
12800 }
39a10a29
GK
12801
12802 op_mode = GET_MODE (rs6000_compare_op0);
12803 if (op_mode == VOIDmode)
12804 op_mode = GET_MODE (rs6000_compare_op1);
12805
12806 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
12807 {
12808 PUT_MODE (condition_rtx, DImode);
12809 convert_move (result, condition_rtx, 0);
12810 }
12811 else
12812 {
12813 PUT_MODE (condition_rtx, SImode);
12814 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
12815 }
12816}
12817
39a10a29
GK
12818/* Emit a branch of kind CODE to location LOC. */
12819
12820void
a2369ed3 12821rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
12822{
12823 rtx condition_rtx, loc_ref;
12824
12825 condition_rtx = rs6000_generate_compare (code);
12826 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
12827 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
12828 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
12829 loc_ref, pc_rtx)));
12830}
12831
12a4e8c5
GK
12832/* Return the string to output a conditional branch to LABEL, which is
12833 the operand template of the label, or NULL if the branch is really a
f676971a 12834 conditional return.
12a4e8c5
GK
12835
12836 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
12837 condition code register and its mode specifies what kind of
12838 comparison we made.
12839
a0ab749a 12840 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
12841
12842 INSN is the insn. */
12843
12844char *
a2369ed3 12845output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
12846{
12847 static char string[64];
12848 enum rtx_code code = GET_CODE (op);
12849 rtx cc_reg = XEXP (op, 0);
12850 enum machine_mode mode = GET_MODE (cc_reg);
12851 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 12852 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
12853 int really_reversed = reversed ^ need_longbranch;
12854 char *s = string;
12855 const char *ccode;
12856 const char *pred;
12857 rtx note;
12858
39a10a29
GK
12859 validate_condition_mode (code, mode);
12860
12861 /* Work out which way this really branches. We could use
12862 reverse_condition_maybe_unordered here always but this
12863 makes the resulting assembler clearer. */
12a4e8c5 12864 if (really_reversed)
de40e1df
DJ
12865 {
12866 /* Reversal of FP compares takes care -- an ordered compare
12867 becomes an unordered compare and vice versa. */
12868 if (mode == CCFPmode)
12869 code = reverse_condition_maybe_unordered (code);
12870 else
12871 code = reverse_condition (code);
12872 }
12a4e8c5 12873
8ef65e3d 12874 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
12875 {
12876 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
12877 to the GT bit. */
37409796
NS
12878 switch (code)
12879 {
12880 case EQ:
12881 /* Opposite of GT. */
12882 code = GT;
12883 break;
12884
12885 case NE:
12886 code = UNLE;
12887 break;
12888
12889 default:
12890 gcc_unreachable ();
12891 }
a3170dc6
AH
12892 }
12893
39a10a29 12894 switch (code)
12a4e8c5
GK
12895 {
12896 /* Not all of these are actually distinct opcodes, but
12897 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
12898 case NE: case LTGT:
12899 ccode = "ne"; break;
12900 case EQ: case UNEQ:
12901 ccode = "eq"; break;
f676971a 12902 case GE: case GEU:
50a0b056 12903 ccode = "ge"; break;
f676971a 12904 case GT: case GTU: case UNGT:
50a0b056 12905 ccode = "gt"; break;
f676971a 12906 case LE: case LEU:
50a0b056 12907 ccode = "le"; break;
f676971a 12908 case LT: case LTU: case UNLT:
50a0b056 12909 ccode = "lt"; break;
12a4e8c5
GK
12910 case UNORDERED: ccode = "un"; break;
12911 case ORDERED: ccode = "nu"; break;
12912 case UNGE: ccode = "nl"; break;
12913 case UNLE: ccode = "ng"; break;
12914 default:
37409796 12915 gcc_unreachable ();
12a4e8c5 12916 }
f676971a
EC
12917
12918 /* Maybe we have a guess as to how likely the branch is.
94a54f47 12919 The old mnemonics don't have a way to specify this information. */
f4857b9b 12920 pred = "";
12a4e8c5
GK
12921 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
12922 if (note != NULL_RTX)
12923 {
12924 /* PROB is the difference from 50%. */
12925 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
12926
12927 /* Only hint for highly probable/improbable branches on newer
12928 cpus as static prediction overrides processor dynamic
12929 prediction. For older cpus we may as well always hint, but
12930 assume not taken for branches that are very close to 50% as a
12931 mispredicted taken branch is more expensive than a
f676971a 12932 mispredicted not-taken branch. */
ec507f2d 12933 if (rs6000_always_hint
2c9e13f3
JH
12934 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
12935 && br_prob_note_reliable_p (note)))
f4857b9b
AM
12936 {
12937 if (abs (prob) > REG_BR_PROB_BASE / 20
12938 && ((prob > 0) ^ need_longbranch))
c4ad648e 12939 pred = "+";
f4857b9b
AM
12940 else
12941 pred = "-";
12942 }
12a4e8c5 12943 }
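  /* With REG_BR_PROB_BASE of 10000 this hints only branches whose predicted
     probability is above roughly 98% or below 2% (unless rs6000_always_hint);
     "+" marks the emitted branch as predicted taken and "-" as not taken,
     with the sense flipped when a long branch forces the condition to be
     reversed. */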
12a4e8c5
GK
12944
12945 if (label == NULL)
94a54f47 12946 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 12947 else
94a54f47 12948 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 12949
37c67319 12950 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 12951 Assume they'd only be the first character.... */
37c67319
GK
12952 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
12953 *s++ = '%';
94a54f47 12954 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
12955
12956 if (label != NULL)
12957 {
12958 /* If the branch distance was too far, we may have to use an
12959 unconditional branch to go the distance. */
12960 if (need_longbranch)
44518ddd 12961 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
12962 else
12963 s += sprintf (s, ",%s", label);
12964 }
12965
12966 return string;
12967}
50a0b056 12968
64022b5d 12969/* Return the string to flip the GT bit on a CR. */
423c1189 12970char *
64022b5d 12971output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
12972{
12973 static char string[64];
12974 int a, b;
12975
37409796
NS
12976 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
12977 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 12978
64022b5d
AH
12979 /* GT bit. */
12980 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
12981 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
12982
12983 sprintf (string, "crnot %d,%d", a, b);
12984 return string;
12985}
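/* For example, if both DST and SRC were CR2 the returned string would be
   "crnot 9,9", complementing CR2's GT bit (CR bit 4*2 + 1) in place. */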
12986
21213b4c
DP
12987/* Return the insn index for the vector compare instruction for the given
12988 CODE, DEST_MODE and OP_MODE. Return INSN_NOT_AVAILABLE if no valid
12989 insn is available. */
12990
12991static int
94ff898d 12992get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
12993 enum machine_mode dest_mode,
12994 enum machine_mode op_mode)
12995{
12996 if (!TARGET_ALTIVEC)
12997 return INSN_NOT_AVAILABLE;
12998
12999 switch (code)
13000 {
13001 case EQ:
13002 if (dest_mode == V16QImode && op_mode == V16QImode)
13003 return UNSPEC_VCMPEQUB;
13004 if (dest_mode == V8HImode && op_mode == V8HImode)
13005 return UNSPEC_VCMPEQUH;
13006 if (dest_mode == V4SImode && op_mode == V4SImode)
13007 return UNSPEC_VCMPEQUW;
13008 if (dest_mode == V4SImode && op_mode == V4SFmode)
13009 return UNSPEC_VCMPEQFP;
13010 break;
13011 case GE:
13012 if (dest_mode == V4SImode && op_mode == V4SFmode)
13013 return UNSPEC_VCMPGEFP;
13014 case GT:
13015 if (dest_mode == V16QImode && op_mode == V16QImode)
13016 return UNSPEC_VCMPGTSB;
13017 if (dest_mode == V8HImode && op_mode == V8HImode)
13018 return UNSPEC_VCMPGTSH;
13019 if (dest_mode == V4SImode && op_mode == V4SImode)
13020 return UNSPEC_VCMPGTSW;
13021 if (dest_mode == V4SImode && op_mode == V4SFmode)
13022 return UNSPEC_VCMPGTFP;
13023 break;
13024 case GTU:
13025 if (dest_mode == V16QImode && op_mode == V16QImode)
13026 return UNSPEC_VCMPGTUB;
13027 if (dest_mode == V8HImode && op_mode == V8HImode)
13028 return UNSPEC_VCMPGTUH;
13029 if (dest_mode == V4SImode && op_mode == V4SImode)
13030 return UNSPEC_VCMPGTUW;
13031 break;
13032 default:
13033 break;
13034 }
13035 return INSN_NOT_AVAILABLE;
13036}
13037
13038/* Emit vector compare for operands OP0 and OP1 using code RCODE.
13039 DMODE is expected destination mode. This is a recursive function. */
13040
13041static rtx
13042rs6000_emit_vector_compare (enum rtx_code rcode,
13043 rtx op0, rtx op1,
13044 enum machine_mode dmode)
13045{
13046 int vec_cmp_insn;
13047 rtx mask;
13048 enum machine_mode dest_mode;
13049 enum machine_mode op_mode = GET_MODE (op1);
13050
37409796
NS
13051 gcc_assert (TARGET_ALTIVEC);
13052 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
13053
13054 /* Floating point vector compare instructions use a V4SImode destination.
13055 Move the result to the appropriate mode later. */
13056 if (dmode == V4SFmode)
13057 dest_mode = V4SImode;
13058 else
13059 dest_mode = dmode;
13060
13061 mask = gen_reg_rtx (dest_mode);
13062 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
13063
13064 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
13065 {
13066 bool swap_operands = false;
13067 bool try_again = false;
13068 switch (rcode)
13069 {
13070 case LT:
13071 rcode = GT;
13072 swap_operands = true;
13073 try_again = true;
13074 break;
13075 case LTU:
13076 rcode = GTU;
13077 swap_operands = true;
13078 try_again = true;
13079 break;
13080 case NE:
370df7db
JC
13081 case UNLE:
13082 case UNLT:
13083 case UNGE:
13084 case UNGT:
13085 /* Invert condition and try again.
13086 e.g., A != B becomes ~(A==B). */
21213b4c 13087 {
370df7db 13088 enum rtx_code rev_code;
21213b4c 13089 enum insn_code nor_code;
d1123cde 13090 rtx eq_rtx;
370df7db
JC
13091
13092 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
13093 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
13094 dest_mode);
94ff898d 13095
166cdb08 13096 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 13097 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
13098 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
13099
13100 if (dmode != dest_mode)
13101 {
13102 rtx temp = gen_reg_rtx (dest_mode);
13103 convert_move (temp, mask, 0);
13104 return temp;
13105 }
13106 return mask;
13107 }
13108 break;
13109 case GE:
13110 case GEU:
13111 case LE:
13112 case LEU:
13113 /* Try GT/GTU/LT/LTU OR EQ */
13114 {
13115 rtx c_rtx, eq_rtx;
13116 enum insn_code ior_code;
13117 enum rtx_code new_code;
13118
37409796
NS
13119 switch (rcode)
13120 {
13121 case GE:
13122 new_code = GT;
13123 break;
13124
13125 case GEU:
13126 new_code = GTU;
13127 break;
13128
13129 case LE:
13130 new_code = LT;
13131 break;
13132
13133 case LEU:
13134 new_code = LTU;
13135 break;
13136
13137 default:
13138 gcc_unreachable ();
13139 }
21213b4c
DP
13140
13141 c_rtx = rs6000_emit_vector_compare (new_code,
13142 op0, op1, dest_mode);
13143 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
13144 dest_mode);
13145
166cdb08 13146 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 13147 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
13148 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
13149 if (dmode != dest_mode)
13150 {
13151 rtx temp = gen_reg_rtx (dest_mode);
13152 convert_move (temp, mask, 0);
13153 return temp;
13154 }
13155 return mask;
13156 }
13157 break;
13158 default:
37409796 13159 gcc_unreachable ();
21213b4c
DP
13160 }
13161
13162 if (try_again)
13163 {
13164 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
13165 /* You only get two chances. */
13166 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
13167 }
13168
13169 if (swap_operands)
13170 {
13171 rtx tmp;
13172 tmp = op0;
13173 op0 = op1;
13174 op1 = tmp;
13175 }
13176 }
13177
915167f5
GK
13178 emit_insn (gen_rtx_SET (VOIDmode, mask,
13179 gen_rtx_UNSPEC (dest_mode,
13180 gen_rtvec (2, op0, op1),
13181 vec_cmp_insn)));
21213b4c
DP
13182 if (dmode != dest_mode)
13183 {
13184 rtx temp = gen_reg_rtx (dest_mode);
13185 convert_move (temp, mask, 0);
13186 return temp;
13187 }
13188 return mask;
13189}
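/* For example, a V4SImode LE comparison has no direct AltiVec instruction;
   the code above builds it recursively as LT OR EQ, where LT itself becomes
   GT with swapped operands (vcmpgtsw) and EQ maps to vcmpequw, and the two
   result masks are then OR'ed together. */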
13190
13191/* Return the vector select instruction for MODE. Return INSN_NOT_AVAILABLE
13192 if no valid insn exists for the given mode. */
13193
13194static int
13195get_vsel_insn (enum machine_mode mode)
13196{
13197 switch (mode)
13198 {
13199 case V4SImode:
13200 return UNSPEC_VSEL4SI;
13201 break;
13202 case V4SFmode:
13203 return UNSPEC_VSEL4SF;
13204 break;
13205 case V8HImode:
13206 return UNSPEC_VSEL8HI;
13207 break;
13208 case V16QImode:
13209 return UNSPEC_VSEL16QI;
13210 break;
13211 default:
13212 return INSN_NOT_AVAILABLE;
13213 break;
13214 }
13215 return INSN_NOT_AVAILABLE;
13216}
13217
13218/* Emit vector select insn where DEST is destination using
13219 operands OP1, OP2 and MASK. */
13220
13221static void
13222rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
13223{
13224 rtx t, temp;
13225 enum machine_mode dest_mode = GET_MODE (dest);
13226 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
13227
13228 temp = gen_reg_rtx (dest_mode);
94ff898d 13229
bb8df8a6 13230 /* For each vector element, select op1 when mask is 1 otherwise
19f1ebc7 13231 select op2. */
915167f5
GK
13232 t = gen_rtx_SET (VOIDmode, temp,
13233 gen_rtx_UNSPEC (dest_mode,
13234 gen_rtvec (3, op2, op1, mask),
13235 vsel_insn_index));
21213b4c
DP
13236 emit_insn (t);
13237 emit_move_insn (dest, temp);
13238 return;
13239}
13240
94ff898d 13241/* Emit vector conditional expression.
21213b4c
DP
13242 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
13243 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
13244
13245int
13246rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
13247 rtx cond, rtx cc_op0, rtx cc_op1)
13248{
13249 enum machine_mode dest_mode = GET_MODE (dest);
13250 enum rtx_code rcode = GET_CODE (cond);
13251 rtx mask;
13252
13253 if (!TARGET_ALTIVEC)
13254 return 0;
13255
13256 /* Get the vector mask for the given relational operations. */
13257 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
13258
13259 rs6000_emit_vector_select (dest, op1, op2, mask);
13260
13261 return 1;
13262}
13263
50a0b056
GK
13264/* Emit a conditional move: move TRUE_COND to DEST if OP applied to the
13265 operands of the last comparison is nonzero/true, and FALSE_COND if it
13266 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 13267
50a0b056 13268int
a2369ed3 13269rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
13270{
13271 enum rtx_code code = GET_CODE (op);
13272 rtx op0 = rs6000_compare_op0;
13273 rtx op1 = rs6000_compare_op1;
13274 REAL_VALUE_TYPE c1;
3148ad6d
DJ
13275 enum machine_mode compare_mode = GET_MODE (op0);
13276 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 13277 rtx temp;
add2402e 13278 bool is_against_zero;
50a0b056 13279
a3c9585f 13280 /* These modes should always match. */
a3170dc6
AH
13281 if (GET_MODE (op1) != compare_mode
13282 /* In the isel case however, we can use a compare immediate, so
13283 op1 may be a small constant. */
13284 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 13285 return 0;
178c3eff 13286 if (GET_MODE (true_cond) != result_mode)
3148ad6d 13287 return 0;
178c3eff 13288 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
13289 return 0;
13290
50a0b056 13291 /* First, work out if the hardware can do this at all, or
a3c9585f 13292 if it's too slow.... */
50a0b056 13293 if (! rs6000_compare_fp_p)
a3170dc6
AH
13294 {
13295 if (TARGET_ISEL)
13296 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13297 return 0;
13298 }
8ef65e3d 13299 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 13300 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 13301 return 0;
50a0b056 13302
add2402e 13303 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 13304
add2402e
GK
13305 /* A floating-point subtract might overflow, underflow, or produce
13306 an inexact result, thus changing the floating-point flags, so it
13307 can't be generated if we care about that. It's safe if one side
13308 of the construct is zero, since then no subtract will be
13309 generated. */
ebb109ad 13310 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
13311 && flag_trapping_math && ! is_against_zero)
13312 return 0;
13313
50a0b056
GK
13314 /* Eliminate half of the comparisons by switching operands, this
13315 makes the remaining code simpler. */
13316 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 13317 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
13318 {
13319 code = reverse_condition_maybe_unordered (code);
13320 temp = true_cond;
13321 true_cond = false_cond;
13322 false_cond = temp;
13323 }
13324
13325 /* UNEQ and LTGT take four instructions for a comparison with zero,
13326 it'll probably be faster to use a branch here too. */
bc9ec0e0 13327 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 13328 return 0;
f676971a 13329
50a0b056
GK
13330 if (GET_CODE (op1) == CONST_DOUBLE)
13331 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 13332
b6d08ca1 13333 /* We're going to try to implement comparisons by performing
50a0b056
GK
13334 a subtract, then comparing against zero. Unfortunately,
13335 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 13336 know that the operand is finite and the comparison
50a0b056 13337 would treat EQ different to UNORDERED, we can't do it. */
bc9ec0e0 13338 if (HONOR_INFINITIES (compare_mode)
50a0b056 13339 && code != GT && code != UNGE
045572c7 13340 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
13341 /* Constructs of the form (a OP b ? a : b) are safe. */
13342 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 13343 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
13344 && ! rtx_equal_p (op1, true_cond))))
13345 return 0;
add2402e 13346
50a0b056
GK
13347 /* At this point we know we can use fsel. */
13348
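  /* As an informal illustration of why a GE test is enough (a sketch, not
     an authoritative definition): the PowerPC fsel instruction behaves
     roughly like

         fsel fD,fA,fC,fB   ==>   fD = (fA >= 0.0) ? fC : fB

     so each comparison below is massaged into the form "x >= 0", and the
     final IF_THEN_ELSE on a GE test can be matched by an fsel pattern.  */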
13349 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
13350 if (! is_against_zero)
13351 {
13352 temp = gen_reg_rtx (compare_mode);
13353 emit_insn (gen_rtx_SET (VOIDmode, temp,
13354 gen_rtx_MINUS (compare_mode, op0, op1)));
13355 op0 = temp;
13356 op1 = CONST0_RTX (compare_mode);
13357 }
50a0b056
GK
13358
13359 /* If we don't care about NaNs we can reduce some of the comparisons
13360 down to faster ones. */
bc9ec0e0 13361 if (! HONOR_NANS (compare_mode))
50a0b056
GK
13362 switch (code)
13363 {
13364 case GT:
13365 code = LE;
13366 temp = true_cond;
13367 true_cond = false_cond;
13368 false_cond = temp;
13369 break;
13370 case UNGE:
13371 code = GE;
13372 break;
13373 case UNEQ:
13374 code = EQ;
13375 break;
13376 default:
13377 break;
13378 }
13379
13380 /* Now, reduce everything down to a GE. */
13381 switch (code)
13382 {
13383 case GE:
13384 break;
13385
13386 case LE:
3148ad6d
DJ
13387 temp = gen_reg_rtx (compare_mode);
13388 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13389 op0 = temp;
13390 break;
13391
13392 case ORDERED:
3148ad6d
DJ
13393 temp = gen_reg_rtx (compare_mode);
13394 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
13395 op0 = temp;
13396 break;
13397
13398 case EQ:
3148ad6d 13399 temp = gen_reg_rtx (compare_mode);
f676971a 13400 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
13401 gen_rtx_NEG (compare_mode,
13402 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
13403 op0 = temp;
13404 break;
13405
13406 case UNGE:
bc9ec0e0 13407 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 13408 temp = gen_reg_rtx (result_mode);
50a0b056 13409 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 13410 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13411 gen_rtx_GE (VOIDmode,
13412 op0, op1),
13413 true_cond, false_cond)));
bc9ec0e0
GK
13414 false_cond = true_cond;
13415 true_cond = temp;
50a0b056 13416
3148ad6d
DJ
13417 temp = gen_reg_rtx (compare_mode);
13418 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13419 op0 = temp;
13420 break;
13421
13422 case GT:
bc9ec0e0 13423 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 13424 temp = gen_reg_rtx (result_mode);
50a0b056 13425 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 13426 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13427 gen_rtx_GE (VOIDmode,
13428 op0, op1),
13429 true_cond, false_cond)));
bc9ec0e0
GK
13430 true_cond = false_cond;
13431 false_cond = temp;
50a0b056 13432
3148ad6d
DJ
13433 temp = gen_reg_rtx (compare_mode);
13434 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13435 op0 = temp;
13436 break;
13437
13438 default:
37409796 13439 gcc_unreachable ();
50a0b056
GK
13440 }
13441
13442 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 13443 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13444 gen_rtx_GE (VOIDmode,
13445 op0, op1),
13446 true_cond, false_cond)));
13447 return 1;
13448}
13449
a3170dc6
AH
13450/* Same as above, but for ints (isel). */
13451
13452static int
a2369ed3 13453rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
13454{
13455 rtx condition_rtx, cr;
13456
13457 /* All isel implementations thus far are 32-bit. */
13458 if (GET_MODE (rs6000_compare_op0) != SImode)
13459 return 0;
13460
13461 /* We still have to do the compare, because isel doesn't do a
13462 compare, it just looks at the CRx bits set by a previous compare
13463 instruction. */
13464 condition_rtx = rs6000_generate_compare (GET_CODE (op));
13465 cr = XEXP (condition_rtx, 0);
13466
13467 if (GET_MODE (cr) == CCmode)
13468 emit_insn (gen_isel_signed (dest, condition_rtx,
13469 true_cond, false_cond, cr));
13470 else
13471 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13472 true_cond, false_cond, cr));
13473
13474 return 1;
13475}
13476
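/* Output the assembly for an isel instruction described by OPERANDS.
   When the comparison code tests for a CR bit being clear (GE, GEU, LE,
   LEU or NE), reverse the condition and swap the two data operands so
   that the bit actually tested is one isel can use directly.  */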
13477const char *
a2369ed3 13478output_isel (rtx *operands)
a3170dc6
AH
13479{
13480 enum rtx_code code;
13481
13482 code = GET_CODE (operands[1]);
13483 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13484 {
13485 PUT_CODE (operands[1], reverse_condition (code));
13486 return "isel %0,%3,%2,%j1";
13487 }
13488 else
13489 return "isel %0,%2,%3,%j1";
13490}
13491
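/* Emit code to set DEST to the minimum or maximum (as selected by CODE,
   one of SMIN, SMAX, UMIN or UMAX) of OP0 and OP1, by expanding a
   conditional move; e.g. smax (a, b) becomes (a >= b ? a : b).  */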
50a0b056 13492void
a2369ed3 13493rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13494{
13495 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13496 enum rtx_code c;
50a0b056 13497 rtx target;
5dc8d536
AH
13498
13499 if (code == SMAX || code == SMIN)
13500 c = GE;
13501 else
13502 c = GEU;
13503
50a0b056 13504 if (code == SMAX || code == UMAX)
f676971a 13505 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13506 op0, op1, mode, 0);
13507 else
f676971a 13508 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13509 op1, op0, mode, 0);
37409796 13510 gcc_assert (target);
50a0b056
GK
13511 if (target != dest)
13512 emit_move_insn (dest, target);
13513}
46c07df8 13514
915167f5
GK
13515/* Emit instructions to perform a load-reserved/store-conditional operation.
13516 The operation performed is an atomic
13517 (set M (CODE:MODE M OP))
13518 If not NULL, BEFORE is atomically set to M before the operation, and
13519 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13520 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13521 Either OP or M may be wrapped in a NOT operation. */
13522
13523void
13524rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13525 rtx m, rtx op, rtx before_param, rtx after_param,
13526 bool sync_p)
13527{
13528 enum machine_mode used_mode;
13529 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13530 rtx used_m;
13531 rtvec vec;
13532 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13533 rtx shift = NULL_RTX;
bb8df8a6 13534
915167f5
GK
13535 if (sync_p)
13536 emit_insn (gen_memory_barrier ());
bb8df8a6 13537
915167f5
GK
13538 if (GET_CODE (m) == NOT)
13539 used_m = XEXP (m, 0);
13540 else
13541 used_m = m;
13542
13543 /* If this is smaller than SImode, we'll have to use SImode with
13544 adjustments. */
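  /* Worked example (illustrative only, assuming big-endian and QImode):
     a byte at address A lives in the word at (A & -4) and sits
     (3 - (A & 3)) * 8 bits above the least-significant bit of that word,
     which is the shift amount computed below via rlwinm/xor.  The operand
     and the 0xff mask are shifted into that position, the operation is
     performed on the whole SImode word, and the interesting subword is
     shifted back down afterwards.  */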
13545 if (mode == QImode || mode == HImode)
13546 {
13547 rtx newop, oldop;
13548
13549 if (MEM_ALIGN (used_m) >= 32)
13550 {
13551 int ishift = 0;
13552 if (BYTES_BIG_ENDIAN)
13553 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13554
915167f5 13555 shift = GEN_INT (ishift);
c75c6d11 13556 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13557 }
13558 else
13559 {
13560 rtx addrSI, aligned_addr;
a9c9d3fa 13561 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13562
c75c6d11
JJ
13563 addrSI = gen_lowpart_common (SImode,
13564 force_reg (Pmode, XEXP (used_m, 0)));
13565 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13566 shift = gen_reg_rtx (SImode);
13567
13568 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13569 GEN_INT (shift_mask)));
13570 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
13571
13572 aligned_addr = expand_binop (Pmode, and_optab,
13573 XEXP (used_m, 0),
13574 GEN_INT (-4), NULL_RTX,
13575 1, OPTAB_LIB_WIDEN);
13576 used_m = change_address (used_m, SImode, aligned_addr);
13577 set_mem_align (used_m, 32);
915167f5 13578 }
c75c6d11
JJ
13579 /* It's safe to keep the old alias set of USED_M, because
13580 the operation is atomic and only affects the original
13581 USED_M. */
13582 if (GET_CODE (m) == NOT)
13583 m = gen_rtx_NOT (SImode, used_m);
13584 else
13585 m = used_m;
915167f5
GK
13586
13587 if (GET_CODE (op) == NOT)
13588 {
13589 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13590 oldop = gen_rtx_NOT (SImode, oldop);
13591 }
13592 else
13593 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13594
915167f5
GK
13595 switch (code)
13596 {
13597 case IOR:
13598 case XOR:
13599 newop = expand_binop (SImode, and_optab,
13600 oldop, GEN_INT (imask), NULL_RTX,
13601 1, OPTAB_LIB_WIDEN);
13602 emit_insn (gen_ashlsi3 (newop, newop, shift));
13603 break;
13604
13605 case AND:
13606 newop = expand_binop (SImode, ior_optab,
13607 oldop, GEN_INT (~imask), NULL_RTX,
13608 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13609 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13610 break;
13611
13612 case PLUS:
9f0076e5 13613 case MINUS:
915167f5
GK
13614 {
13615 rtx mask;
bb8df8a6 13616
915167f5
GK
13617 newop = expand_binop (SImode, and_optab,
13618 oldop, GEN_INT (imask), NULL_RTX,
13619 1, OPTAB_LIB_WIDEN);
13620 emit_insn (gen_ashlsi3 (newop, newop, shift));
13621
13622 mask = gen_reg_rtx (SImode);
13623 emit_move_insn (mask, GEN_INT (imask));
13624 emit_insn (gen_ashlsi3 (mask, mask, shift));
13625
9f0076e5
DE
13626 if (code == PLUS)
13627 newop = gen_rtx_PLUS (SImode, m, newop);
13628 else
13629 newop = gen_rtx_MINUS (SImode, m, newop);
13630 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13631 newop = gen_rtx_IOR (SImode, newop,
13632 gen_rtx_AND (SImode,
13633 gen_rtx_NOT (SImode, mask),
13634 m));
13635 break;
13636 }
13637
13638 default:
13639 gcc_unreachable ();
13640 }
13641
a9c9d3fa
GK
13642 if (GET_CODE (m) == NOT)
13643 {
13644 rtx mask, xorm;
13645
13646 mask = gen_reg_rtx (SImode);
13647 emit_move_insn (mask, GEN_INT (imask));
13648 emit_insn (gen_ashlsi3 (mask, mask, shift));
13649
13650 xorm = gen_rtx_XOR (SImode, used_m, mask);
13651 /* Depending on the value of 'op', the XOR or the operation might
13652 be able to be simplified away. */
13653 newop = simplify_gen_binary (code, SImode, xorm, newop);
13654 }
915167f5
GK
13655 op = newop;
13656 used_mode = SImode;
13657 before = gen_reg_rtx (used_mode);
13658 after = gen_reg_rtx (used_mode);
13659 }
13660 else
13661 {
13662 used_mode = mode;
13663 before = before_param;
13664 after = after_param;
13665
13666 if (before == NULL_RTX)
13667 before = gen_reg_rtx (used_mode);
13668 if (after == NULL_RTX)
13669 after = gen_reg_rtx (used_mode);
13670 }
bb8df8a6 13671
9f0076e5
DE
13672 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
13673 && used_mode != mode)
915167f5
GK
13674 the_op = op; /* Computed above. */
13675 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13676 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
13677 else
13678 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13679
13680 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13681 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13682 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
13683 gen_rtx_UNSPEC (used_mode,
13684 gen_rtvec (1, the_op),
13685 UNSPEC_SYNC_OP));
915167f5
GK
13686 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
13687
9f0076e5 13688 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
13689 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
13690 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
13691 else
13692 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
13693 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
13694
13695 /* Shift and mask the return values properly. */
13696 if (used_mode != mode && before_param)
13697 {
13698 emit_insn (gen_lshrsi3 (before, before, shift));
13699 convert_move (before_param, before, 1);
13700 }
13701
13702 if (used_mode != mode && after_param)
13703 {
13704 emit_insn (gen_lshrsi3 (after, after, shift));
13705 convert_move (after_param, after, 1);
13706 }
13707
13708 /* The previous sequence will end with a branch that's dependent on
13709 the conditional store, so placing an isync will ensure that no
13710 other instructions (especially, no load or store instructions)
13711 can start before the atomic operation completes. */
13712 if (sync_p)
13713 emit_insn (gen_isync ());
13714}
13715
b52110d4
DE
13716/* A subroutine of the atomic operation splitters. Jump to LABEL if
13717 COND is true. Mark the jump as unlikely to be taken. */
13718
13719static void
13720emit_unlikely_jump (rtx cond, rtx label)
13721{
13722 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
13723 rtx x;
13724
13725 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
13726 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
13727 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
13728}
13729
13730/* A subroutine of the atomic operation splitters. Emit a load-locked
13731 instruction in MODE. */
13732
13733static void
13734emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
13735{
13736 rtx (*fn) (rtx, rtx) = NULL;
13737 if (mode == SImode)
13738 fn = gen_load_locked_si;
13739 else if (mode == DImode)
13740 fn = gen_load_locked_di;
13741 emit_insn (fn (reg, mem));
13742}
13743
13744/* A subroutine of the atomic operation splitters. Emit a store-conditional
13745 instruction in MODE. */
13746
13747static void
13748emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
13749{
13750 rtx (*fn) (rtx, rtx, rtx) = NULL;
13751 if (mode == SImode)
13752 fn = gen_store_conditional_si;
13753 else if (mode == DImode)
13754 fn = gen_store_conditional_di;
13755
9f0076e5 13756 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
13757 if (PPC405_ERRATUM77)
13758 emit_insn (gen_memory_barrier ());
13759
13760 emit_insn (fn (res, mem, val));
13761}
13762
ea2c620c 13763/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 13764 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
13765 operand of the binary operator. BEFORE and AFTER are optional locations to
13766 return the value of MEM either before or after the operation. SCRATCH is
13767 a scratch register. */
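/* A rough sketch of the sequence this splitter produces (illustrative
   only; the exact assembly depends on MODE and CODE):

        sync
     1: lwarx   scratch,0,mem        # ldarx for DImode
        <CODE>  scratch,scratch,val
        stwcx.  scratch,0,mem        # stdcx. for DImode
        bne-    1b
        isync  */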
13768
13769void
13770rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
13771 rtx before, rtx after, rtx scratch)
13772{
13773 enum machine_mode mode = GET_MODE (mem);
13774 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13775
13776 emit_insn (gen_memory_barrier ());
13777
13778 label = gen_label_rtx ();
13779 emit_label (label);
13780 label = gen_rtx_LABEL_REF (VOIDmode, label);
13781
13782 if (before == NULL_RTX)
13783 before = scratch;
13784 emit_load_locked (mode, before, mem);
13785
13786 if (code == NOT)
13787 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
13788 else if (code == AND)
13789 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
13790 else
13791 x = gen_rtx_fmt_ee (code, mode, before, val);
13792
13793 if (after != NULL_RTX)
13794 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
13795 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
13796
13797 emit_store_conditional (mode, cond, mem, scratch);
13798
13799 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13800 emit_unlikely_jump (x, label);
13801
13802 emit_insn (gen_isync ());
13803}
13804
b52110d4
DE
13805/* Expand an atomic compare and swap operation. MEM is the memory on which
13806 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
13807 value to be stored. SCRATCH is a scratch GPR. */
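/* Informally, the emitted sequence is along these lines (a sketch only):

        sync
     1: lwarx   retval,0,mem
        cmpw    retval,oldval
        bne-    2f
        mr      scratch,newval
        stwcx.  scratch,0,mem
        bne-    1b
        isync
     2:  */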
13808
13809void
13810rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
13811 rtx scratch)
13812{
13813 enum machine_mode mode = GET_MODE (mem);
13814 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13815
13816 emit_insn (gen_memory_barrier ());
13817
13818 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13819 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13820 emit_label (XEXP (label1, 0));
13821
13822 emit_load_locked (mode, retval, mem);
13823
13824 x = gen_rtx_COMPARE (CCmode, retval, oldval);
13825 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13826
13827 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13828 emit_unlikely_jump (x, label2);
13829
13830 emit_move_insn (scratch, newval);
13831 emit_store_conditional (mode, cond, mem, scratch);
13832
13833 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13834 emit_unlikely_jump (x, label1);
13835
13836 emit_insn (gen_isync ());
13837 emit_label (XEXP (label2, 0));
13838}
13839
13840/* Expand an atomic test and set operation. MEM is the memory on which
13841 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
13842
13843void
13844rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
13845{
13846 enum machine_mode mode = GET_MODE (mem);
13847 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13848
13849 emit_insn (gen_memory_barrier ());
13850
13851 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13852 emit_label (XEXP (label, 0));
13853
13854 emit_load_locked (mode, retval, mem);
13855 emit_move_insn (scratch, val);
13856 emit_store_conditional (mode, cond, mem, scratch);
13857
13858 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13859 emit_unlikely_jump (x, label);
13860
13861 emit_insn (gen_isync ());
13862}
13863
9fc75b97
DE
13864void
13865rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
13866{
13867 enum machine_mode mode = GET_MODE (mem);
13868 rtx addrSI, align, wdst, shift, mask;
13869 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
13870 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13871
13872 /* Shift amount for subword relative to aligned word. */
13873 addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
13874 shift = gen_reg_rtx (SImode);
13875 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
13876 GEN_INT (shift_mask)));
13877 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
13878
13879 /* Shift and mask old value into position within word. */
13880 oldval = convert_modes (SImode, mode, oldval, 1);
13881 oldval = expand_binop (SImode, and_optab,
13882 oldval, GEN_INT (imask), NULL_RTX,
13883 1, OPTAB_LIB_WIDEN);
13884 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
13885
13886 /* Shift and mask new value into position within word. */
13887 newval = convert_modes (SImode, mode, newval, 1);
13888 newval = expand_binop (SImode, and_optab,
13889 newval, GEN_INT (imask), NULL_RTX,
13890 1, OPTAB_LIB_WIDEN);
13891 emit_insn (gen_ashlsi3 (newval, newval, shift));
13892
13893 /* Mask for insertion. */
13894 mask = gen_reg_rtx (SImode);
13895 emit_move_insn (mask, GEN_INT (imask));
13896 emit_insn (gen_ashlsi3 (mask, mask, shift));
13897
13898 /* Address of aligned word containing subword. */
13899 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
13900 NULL_RTX, 1, OPTAB_LIB_WIDEN);
13901 mem = change_address (mem, SImode, align);
13902 set_mem_align (mem, 32);
13903 MEM_VOLATILE_P (mem) = 1;
13904
13905 wdst = gen_reg_rtx (SImode);
13906 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
13907 oldval, newval, mem));
13908
2725b75c
JJ
13909 /* Shift the result back. */
13910 emit_insn (gen_lshrsi3 (wdst, wdst, shift));
13911
9fc75b97
DE
13912 emit_move_insn (dst, gen_lowpart (mode, wdst));
13913}
13914
13915void
13916rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
13917 rtx oldval, rtx newval, rtx mem,
13918 rtx scratch)
13919{
13920 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13921
13922 emit_insn (gen_memory_barrier ());
13923 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13924 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13925 emit_label (XEXP (label1, 0));
13926
13927 emit_load_locked (SImode, scratch, mem);
13928
13929 /* Mask subword within loaded value for comparison with oldval.
13930 Use UNSPEC_AND to avoid clobber.  */
13931 emit_insn (gen_rtx_SET (SImode, dest,
13932 gen_rtx_UNSPEC (SImode,
13933 gen_rtvec (2, scratch, mask),
13934 UNSPEC_AND)));
13935
13936 x = gen_rtx_COMPARE (CCmode, dest, oldval);
13937 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13938
13939 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13940 emit_unlikely_jump (x, label2);
13941
13942 /* Clear subword within loaded value for insertion of new value. */
13943 emit_insn (gen_rtx_SET (SImode, scratch,
13944 gen_rtx_AND (SImode,
13945 gen_rtx_NOT (SImode, mask), scratch)));
13946 emit_insn (gen_iorsi3 (scratch, scratch, newval));
13947 emit_store_conditional (SImode, cond, mem, scratch);
13948
13949 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13950 emit_unlikely_jump (x, label1);
13951
13952 emit_insn (gen_isync ());
13953 emit_label (XEXP (label2, 0));
13954}
13955
13956
b52110d4 13957 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
13958 multi-register moves. It will emit at most one instruction for
13959 each register that is accessed; that is, it won't emit li/lis pairs
13960 (or equivalent for 64-bit code). One of SRC or DST must be a hard
13961 register. */
46c07df8 13962
46c07df8 13963void
a9baceb1 13964rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 13965{
a9baceb1
GK
13966 /* The register number of the first register being moved. */
13967 int reg;
13968 /* The mode that is to be moved. */
13969 enum machine_mode mode;
13970 /* The mode that the move is being done in, and its size. */
13971 enum machine_mode reg_mode;
13972 int reg_mode_size;
13973 /* The number of registers that will be moved. */
13974 int nregs;
13975
13976 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
13977 mode = GET_MODE (dst);
c8b622ff 13978 nregs = hard_regno_nregs[reg][mode];
a9baceb1 13979 if (FP_REGNO_P (reg))
7393f7f8 13980 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13981 else if (ALTIVEC_REGNO_P (reg))
13982 reg_mode = V16QImode;
4d4447b5
PB
13983 else if (TARGET_E500_DOUBLE && (mode == TFmode || mode == TDmode))
13984 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13985 else
13986 reg_mode = word_mode;
13987 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 13988
37409796 13989 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 13990
a9baceb1
GK
13991 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
13992 {
13993 /* Move register range backwards, if we might have destructive
13994 overlap. */
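      /* For example (illustration only): copying a two-register value held
         in r3..r4 into r4..r5 must set r5 from r4 before r4 is overwritten
         from r3, hence the descending loop below.  */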
13995 int i;
13996 for (i = nregs - 1; i >= 0; i--)
f676971a 13997 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
13998 simplify_gen_subreg (reg_mode, dst, mode,
13999 i * reg_mode_size),
14000 simplify_gen_subreg (reg_mode, src, mode,
14001 i * reg_mode_size)));
14002 }
46c07df8
HP
14003 else
14004 {
a9baceb1
GK
14005 int i;
14006 int j = -1;
14007 bool used_update = false;
46c07df8 14008
c1e55850 14009 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
14010 {
14011 rtx breg;
3a1f863f 14012
a9baceb1
GK
14013 if (GET_CODE (XEXP (src, 0)) == PRE_INC
14014 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
14015 {
14016 rtx delta_rtx;
a9baceb1 14017 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
14018 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
14019 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
14020 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
14021 emit_insn (TARGET_32BIT
14022 ? gen_addsi3 (breg, breg, delta_rtx)
14023 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14024 src = replace_equiv_address (src, breg);
3a1f863f 14025 }
d04b6e6e 14026 else if (! rs6000_offsettable_memref_p (src))
c1e55850 14027 {
13e2e16e 14028 rtx basereg;
c1e55850
GK
14029 basereg = gen_rtx_REG (Pmode, reg);
14030 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 14031 src = replace_equiv_address (src, basereg);
c1e55850 14032 }
3a1f863f 14033
0423421f
AM
14034 breg = XEXP (src, 0);
14035 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
14036 breg = XEXP (breg, 0);
14037
14038 /* If the base register we are using to address memory is
14039 also a destination reg, then change that register last. */
14040 if (REG_P (breg)
14041 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
14042 && REGNO (breg) < REGNO (dst) + nregs)
14043 j = REGNO (breg) - REGNO (dst);
c4ad648e 14044 }
46c07df8 14045
a9baceb1 14046 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
14047 {
14048 rtx breg;
14049
a9baceb1
GK
14050 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
14051 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
14052 {
14053 rtx delta_rtx;
a9baceb1 14054 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
14055 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
14056 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
14057 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
14058
14059 /* We have to update the breg before doing the store.
14060 Use store with update, if available. */
14061
14062 if (TARGET_UPDATE)
14063 {
a9baceb1 14064 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
14065 emit_insn (TARGET_32BIT
14066 ? (TARGET_POWERPC64
14067 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
14068 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
14069 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 14070 used_update = true;
3a1f863f
DE
14071 }
14072 else
a9baceb1
GK
14073 emit_insn (TARGET_32BIT
14074 ? gen_addsi3 (breg, breg, delta_rtx)
14075 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14076 dst = replace_equiv_address (dst, breg);
3a1f863f 14077 }
37409796 14078 else
d04b6e6e 14079 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
14080 }
14081
46c07df8 14082 for (i = 0; i < nregs; i++)
f676971a 14083 {
3a1f863f
DE
14084 /* Calculate index to next subword. */
14085 ++j;
f676971a 14086 if (j == nregs)
3a1f863f 14087 j = 0;
46c07df8 14088
112cdef5 14089 /* If the compiler already emitted the move of the first word by
a9baceb1 14090 store with update, there is no need to do anything. */
3a1f863f 14091 if (j == 0 && used_update)
a9baceb1 14092 continue;
f676971a 14093
a9baceb1
GK
14094 emit_insn (gen_rtx_SET (VOIDmode,
14095 simplify_gen_subreg (reg_mode, dst, mode,
14096 j * reg_mode_size),
14097 simplify_gen_subreg (reg_mode, src, mode,
14098 j * reg_mode_size)));
3a1f863f 14099 }
46c07df8
HP
14100 }
14101}
14102
12a4e8c5 14103\f
a4f6c312
SS
14104/* This page contains routines that are used to determine what the
14105 function prologue and epilogue code will do and write them out. */
9878760c 14106
a4f6c312
SS
14107/* Return the first fixed-point register that is required to be
14108 saved. 32 if none. */
9878760c
RK
14109
14110int
863d938c 14111first_reg_to_save (void)
9878760c
RK
14112{
14113 int first_reg;
14114
14115 /* Find lowest numbered live register. */
14116 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 14117 if (df_regs_ever_live_p (first_reg)
a38d360d 14118 && (! call_used_regs[first_reg]
1db02437 14119 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14120 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
14121 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
14122 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
14123 break;
14124
ee890fe2 14125#if TARGET_MACHO
93638d7a
AM
14126 if (flag_pic
14127 && current_function_uses_pic_offset_table
14128 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 14129 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
14130#endif
14131
9878760c
RK
14132 return first_reg;
14133}
14134
14135/* Similar, for FP regs. */
14136
14137int
863d938c 14138first_fp_reg_to_save (void)
9878760c
RK
14139{
14140 int first_reg;
14141
14142 /* Find lowest numbered live register. */
14143 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 14144 if (df_regs_ever_live_p (first_reg))
9878760c
RK
14145 break;
14146
14147 return first_reg;
14148}
00b960c7
AH
14149
14150/* Similar, for AltiVec regs. */
14151
14152static int
863d938c 14153first_altivec_reg_to_save (void)
00b960c7
AH
14154{
14155 int i;
14156
14157 /* Stack frame remains as is unless we are in AltiVec ABI. */
14158 if (! TARGET_ALTIVEC_ABI)
14159 return LAST_ALTIVEC_REGNO + 1;
14160
22fa69da 14161 /* On Darwin, the unwind routines are compiled without
982afe02 14162 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
14163 altivec registers when necessary. */
14164 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
14165 && ! TARGET_ALTIVEC)
14166 return FIRST_ALTIVEC_REGNO + 20;
14167
00b960c7
AH
14168 /* Find lowest numbered live register. */
14169 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14170 if (df_regs_ever_live_p (i))
00b960c7
AH
14171 break;
14172
14173 return i;
14174}
14175
14176/* Return a 32-bit mask of the AltiVec registers we need to set in
14177 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
14178 the 32-bit word is 0. */
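/* For instance (illustrative), if only V20..V31 are live the computed mask
   is 0xFFF, which is also the value the Darwin save_world case below uses
   for the call-saved AltiVec registers.  */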
14179
14180static unsigned int
863d938c 14181compute_vrsave_mask (void)
00b960c7
AH
14182{
14183 unsigned int i, mask = 0;
14184
22fa69da 14185 /* On Darwin, the unwind routines are compiled without
982afe02 14186 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
14187 call-saved altivec registers when necessary. */
14188 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
14189 && ! TARGET_ALTIVEC)
14190 mask |= 0xFFF;
14191
00b960c7
AH
14192 /* First, find out if we use _any_ altivec registers. */
14193 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14194 if (df_regs_ever_live_p (i))
00b960c7
AH
14195 mask |= ALTIVEC_REG_BIT (i);
14196
14197 if (mask == 0)
14198 return mask;
14199
00b960c7
AH
14200 /* Next, remove the argument registers from the set. These must
14201 be in the VRSAVE mask set by the caller, so we don't need to add
14202 them in again. More importantly, the mask we compute here is
14203 used to generate CLOBBERs in the set_vrsave insn, and we do not
14204 wish the argument registers to die. */
a6cf80f2 14205 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
14206 mask &= ~ALTIVEC_REG_BIT (i);
14207
14208 /* Similarly, remove the return value from the set. */
14209 {
14210 bool yes = false;
14211 diddle_return_value (is_altivec_return_reg, &yes);
14212 if (yes)
14213 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
14214 }
14215
14216 return mask;
14217}
14218
d62294f5 14219/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
14220 size of prologues/epilogues by calling our own save/restore-the-world
14221 routines. */
d62294f5
FJ
14222
14223static void
f57fe068
AM
14224compute_save_world_info (rs6000_stack_t *info_ptr)
14225{
14226 info_ptr->world_save_p = 1;
14227 info_ptr->world_save_p
14228 = (WORLD_SAVE_P (info_ptr)
14229 && DEFAULT_ABI == ABI_DARWIN
14230 && ! (current_function_calls_setjmp && flag_exceptions)
14231 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
14232 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
14233 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
14234 && info_ptr->cr_save_p);
f676971a 14235
d62294f5
FJ
14236 /* This will not work in conjunction with sibcalls. Make sure there
14237 are none. (This check is expensive, but seldom executed.) */
f57fe068 14238 if (WORLD_SAVE_P (info_ptr))
f676971a 14239 {
d62294f5
FJ
14240 rtx insn;
14241 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
14242 if ( GET_CODE (insn) == CALL_INSN
14243 && SIBLING_CALL_P (insn))
14244 {
14245 info_ptr->world_save_p = 0;
14246 break;
14247 }
d62294f5 14248 }
f676971a 14249
f57fe068 14250 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
14251 {
14252 /* Even if we're not touching VRsave, make sure there's room on the
14253 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 14254 will attempt to save it. */
d62294f5
FJ
14255 info_ptr->vrsave_size = 4;
14256
14257 /* "Save" the VRsave register too if we're saving the world. */
14258 if (info_ptr->vrsave_mask == 0)
c4ad648e 14259 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
14260
14261 /* Because the Darwin register save/restore routines only handle
c4ad648e 14262 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 14263 check. */
37409796
NS
14264 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14265 && (info_ptr->first_altivec_reg_save
14266 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 14267 }
f676971a 14268 return;
d62294f5
FJ
14269}
14270
14271
00b960c7 14272static void
a2369ed3 14273is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
14274{
14275 bool *yes = (bool *) xyes;
14276 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14277 *yes = true;
14278}
14279
4697a36c
MM
14280\f
14281/* Calculate the stack information for the current function. This is
14282 complicated by having two separate calling sequences, the AIX calling
14283 sequence and the V.4 calling sequence.
14284
592696dd 14285 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 14286 32-bit 64-bit
4697a36c 14287 SP----> +---------------------------------------+
a260abc9 14288 | back chain to caller | 0 0
4697a36c 14289 +---------------------------------------+
a260abc9 14290 | saved CR | 4 8 (8-11)
4697a36c 14291 +---------------------------------------+
a260abc9 14292 | saved LR | 8 16
4697a36c 14293 +---------------------------------------+
a260abc9 14294 | reserved for compilers | 12 24
4697a36c 14295 +---------------------------------------+
a260abc9 14296 | reserved for binders | 16 32
4697a36c 14297 +---------------------------------------+
a260abc9 14298 | saved TOC pointer | 20 40
4697a36c 14299 +---------------------------------------+
a260abc9 14300 | Parameter save area (P) | 24 48
4697a36c 14301 +---------------------------------------+
a260abc9 14302 | Alloca space (A) | 24+P etc.
802a0058 14303 +---------------------------------------+
a7df97e6 14304 | Local variable space (L) | 24+P+A
4697a36c 14305 +---------------------------------------+
a7df97e6 14306 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 14307 +---------------------------------------+
00b960c7
AH
14308 | Save area for AltiVec registers (W) | 24+P+A+L+X
14309 +---------------------------------------+
14310 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
14311 +---------------------------------------+
14312 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 14313 +---------------------------------------+
00b960c7
AH
14314 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
14315 +---------------------------------------+
14316 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
4697a36c
MM
14317 +---------------------------------------+
14318 old SP->| back chain to caller's caller |
14319 +---------------------------------------+
14320
5376a30c
KR
14321 The required alignment for AIX configurations is two words (i.e., 8
14322 or 16 bytes).
14323
14324
4697a36c
MM
14325 V.4 stack frames look like:
14326
14327 SP----> +---------------------------------------+
14328 | back chain to caller | 0
14329 +---------------------------------------+
5eb387b8 14330 | caller's saved LR | 4
4697a36c
MM
14331 +---------------------------------------+
14332 | Parameter save area (P) | 8
14333 +---------------------------------------+
a7df97e6 14334 | Alloca space (A) | 8+P
f676971a 14335 +---------------------------------------+
a7df97e6 14336 | Varargs save area (V) | 8+P+A
f676971a 14337 +---------------------------------------+
a7df97e6 14338 | Local variable space (L) | 8+P+A+V
f676971a 14339 +---------------------------------------+
a7df97e6 14340 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 14341 +---------------------------------------+
00b960c7
AH
14342 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
14343 +---------------------------------------+
14344 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
14345 +---------------------------------------+
14346 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
14347 +---------------------------------------+
c4ad648e
AM
14348 | SPE: area for 64-bit GP registers |
14349 +---------------------------------------+
14350 | SPE alignment padding |
14351 +---------------------------------------+
00b960c7 14352 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 14353 +---------------------------------------+
00b960c7 14354 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 14355 +---------------------------------------+
00b960c7 14356 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
14357 +---------------------------------------+
14358 old SP->| back chain to caller's caller |
14359 +---------------------------------------+
b6c9286a 14360
5376a30c
KR
14361 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
14362 given. (But note below and in sysv4.h that we require only 8 and
14363 may round up the size of our stack frame anyways. The historical
14364 reason is early versions of powerpc-linux which didn't properly
14365 align the stack at program startup. A happy side-effect is that
14366 -mno-eabi libraries can be used with -meabi programs.)
14367
50d440bc 14368 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
14369 the stack alignment requirements may differ. If -mno-eabi is not
14370 given, the required stack alignment is 8 bytes; if -mno-eabi is
14371 given, the required alignment is 16 bytes. (But see V.4 comment
14372 above.) */
4697a36c 14373
61b2fbe7
MM
14374#ifndef ABI_STACK_BOUNDARY
14375#define ABI_STACK_BOUNDARY STACK_BOUNDARY
14376#endif
14377
d1d0c603 14378static rs6000_stack_t *
863d938c 14379rs6000_stack_info (void)
4697a36c 14380{
022123e6 14381 static rs6000_stack_t info;
4697a36c 14382 rs6000_stack_t *info_ptr = &info;
327e5343 14383 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 14384 int ehrd_size;
64045029 14385 int save_align;
8070c91a 14386 int first_gp;
44688022 14387 HOST_WIDE_INT non_fixed_size;
4697a36c 14388
022123e6 14389 memset (&info, 0, sizeof (info));
4697a36c 14390
c19de7aa
AH
14391 if (TARGET_SPE)
14392 {
14393 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 14394 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
14395 cfun->machine->insn_chain_scanned_p
14396 = spe_func_has_64bit_regs_p () + 1;
14397 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
14398 }
14399
a4f6c312 14400 /* Select which calling sequence. */
178274da 14401 info_ptr->abi = DEFAULT_ABI;
9878760c 14402
a4f6c312 14403 /* Calculate which registers need to be saved & save area size. */
4697a36c 14404 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 14405 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
14406 even if it currently looks like we won't. Reload may need it to
14407 get at a constant; if so, it will have already created a constant
14408 pool entry for it. */
2bfcf297 14409 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
14410 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14411 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
8070c91a 14412 && current_function_uses_const_pool
1db02437 14413 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 14414 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 14415 else
8070c91a
DJ
14416 first_gp = info_ptr->first_gp_reg_save;
14417
14418 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 14419
a3170dc6
AH
14420 /* For the SPE, we have an additional upper 32 bits on each GPR.
14421 Ideally we should save the entire 64 bits only when the upper
14422 half is used in SIMD instructions. Since we only record
14423 registers live (not the size they are used in), this proves
14424 difficult because we'd have to traverse the instruction chain at
14425 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
14426 so we opt to save all the GPRs in 64 bits whenever even one
14427 register is used in 64 bits. Otherwise, all the registers in the
14428 frame get saved in 32 bits.
a3170dc6 14429
c19de7aa 14430 So, when we save all GPRs (except the SP) in 64 bits, the
a3170dc6 14431 traditional GP save area will be empty. */
c19de7aa 14432 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14433 info_ptr->gp_size = 0;
14434
4697a36c
MM
14435 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14436 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14437
00b960c7
AH
14438 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14439 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14440 - info_ptr->first_altivec_reg_save);
14441
592696dd 14442 /* Does this function call anything? */
71f123ca
FS
14443 info_ptr->calls_p = (! current_function_is_leaf
14444 || cfun->machine->ra_needs_full_frame);
b6c9286a 14445
a4f6c312 14446 /* Determine if we need to save the link register. */
022123e6
AM
14447 if ((DEFAULT_ABI == ABI_AIX
14448 && current_function_profile
14449 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
14450#ifdef TARGET_RELOCATABLE
14451 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14452#endif
14453 || (info_ptr->first_fp_reg_save != 64
14454 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
178274da 14455 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
022123e6
AM
14456 || info_ptr->calls_p
14457 || rs6000_ra_ever_killed ())
4697a36c
MM
14458 {
14459 info_ptr->lr_save_p = 1;
1de43f85 14460 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
14461 }
14462
9ebbca7d 14463 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
14464 if (df_regs_ever_live_p (CR2_REGNO)
14465 || df_regs_ever_live_p (CR3_REGNO)
14466 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
14467 {
14468 info_ptr->cr_save_p = 1;
178274da 14469 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
14470 info_ptr->cr_size = reg_size;
14471 }
14472
83720594
RH
14473 /* If the current function calls __builtin_eh_return, then we need
14474 to allocate stack space for registers that will hold data for
14475 the exception handler. */
14476 if (current_function_calls_eh_return)
14477 {
14478 unsigned int i;
14479 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14480 continue;
a3170dc6
AH
14481
14482 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14483 ehrd_size = i * (TARGET_SPE_ABI
14484 && info_ptr->spe_64bit_regs_used != 0
14485 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14486 }
14487 else
14488 ehrd_size = 0;
14489
592696dd 14490 /* Determine various sizes. */
4697a36c
MM
14491 info_ptr->reg_size = reg_size;
14492 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14493 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
a4f6c312 14494 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
03e007d7 14495 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14496 if (FRAME_GROWS_DOWNWARD)
14497 info_ptr->vars_size
5b667039
JJ
14498 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14499 + info_ptr->parm_size,
7d5175e1 14500 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14501 - (info_ptr->fixed_size + info_ptr->vars_size
14502 + info_ptr->parm_size);
00b960c7 14503
c19de7aa 14504 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 14505 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
14506 else
14507 info_ptr->spe_gp_size = 0;
14508
4d774ff8
HP
14509 if (TARGET_ALTIVEC_ABI)
14510 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14511 else
4d774ff8
HP
14512 info_ptr->vrsave_mask = 0;
14513
14514 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14515 info_ptr->vrsave_size = 4;
14516 else
14517 info_ptr->vrsave_size = 0;
b6c9286a 14518
d62294f5
FJ
14519 compute_save_world_info (info_ptr);
14520
592696dd 14521 /* Calculate the offsets. */
178274da 14522 switch (DEFAULT_ABI)
4697a36c 14523 {
b6c9286a 14524 case ABI_NONE:
24d304eb 14525 default:
37409796 14526 gcc_unreachable ();
b6c9286a
MM
14527
14528 case ABI_AIX:
ee890fe2 14529 case ABI_DARWIN:
b6c9286a
MM
14530 info_ptr->fp_save_offset = - info_ptr->fp_size;
14531 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14532
14533 if (TARGET_ALTIVEC_ABI)
14534 {
14535 info_ptr->vrsave_save_offset
14536 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14537
982afe02 14538 /* Align stack so vector save area is on a quadword boundary.
9278121c 14539 The padding goes above the vectors. */
00b960c7
AH
14540 if (info_ptr->altivec_size != 0)
14541 info_ptr->altivec_padding_size
9278121c 14542 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14543 else
14544 info_ptr->altivec_padding_size = 0;
14545
14546 info_ptr->altivec_save_offset
14547 = info_ptr->vrsave_save_offset
14548 - info_ptr->altivec_padding_size
14549 - info_ptr->altivec_size;
9278121c
GK
14550 gcc_assert (info_ptr->altivec_size == 0
14551 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
14552
14553 /* Adjust for AltiVec case. */
14554 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14555 }
14556 else
14557 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14558 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14559 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
14560 break;
14561
14562 case ABI_V4:
b6c9286a
MM
14563 info_ptr->fp_save_offset = - info_ptr->fp_size;
14564 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14565 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14566
c19de7aa 14567 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14568 {
14569 /* Align stack so SPE GPR save area is aligned on a
14570 double-word boundary. */
14571 if (info_ptr->spe_gp_size != 0)
14572 info_ptr->spe_padding_size
14573 = 8 - (-info_ptr->cr_save_offset % 8);
14574 else
14575 info_ptr->spe_padding_size = 0;
14576
14577 info_ptr->spe_gp_save_offset
14578 = info_ptr->cr_save_offset
14579 - info_ptr->spe_padding_size
14580 - info_ptr->spe_gp_size;
14581
14582 /* Adjust for SPE case. */
022123e6 14583 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14584 }
a3170dc6 14585 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14586 {
14587 info_ptr->vrsave_save_offset
14588 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14589
14590 /* Align stack so vector save area is on a quadword boundary. */
14591 if (info_ptr->altivec_size != 0)
14592 info_ptr->altivec_padding_size
14593 = 16 - (-info_ptr->vrsave_save_offset % 16);
14594 else
14595 info_ptr->altivec_padding_size = 0;
14596
14597 info_ptr->altivec_save_offset
14598 = info_ptr->vrsave_save_offset
14599 - info_ptr->altivec_padding_size
14600 - info_ptr->altivec_size;
14601
14602 /* Adjust for AltiVec case. */
022123e6 14603 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14604 }
14605 else
022123e6
AM
14606 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14607 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14608 info_ptr->lr_save_offset = reg_size;
14609 break;
4697a36c
MM
14610 }
14611
64045029 14612 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14613 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14614 + info_ptr->gp_size
14615 + info_ptr->altivec_size
14616 + info_ptr->altivec_padding_size
a3170dc6
AH
14617 + info_ptr->spe_gp_size
14618 + info_ptr->spe_padding_size
00b960c7
AH
14619 + ehrd_size
14620 + info_ptr->cr_size
022123e6 14621 + info_ptr->vrsave_size,
64045029 14622 save_align);
00b960c7 14623
44688022 14624 non_fixed_size = (info_ptr->vars_size
ff381587 14625 + info_ptr->parm_size
5b667039 14626 + info_ptr->save_size);
ff381587 14627
44688022
AM
14628 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14629 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14630
14631 /* Determine if we need to allocate any stack frame:
14632
a4f6c312
SS
14633 For AIX we need to push the stack if a frame pointer is needed
14634 (because the stack might be dynamically adjusted), if we are
14635 debugging, if we make calls, or if the sum of fp_save, gp_save,
14636 and local variables is more than the space needed to save all
14637 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14638 + 18*8 = 288 (GPR13 reserved).
ff381587 14639
a4f6c312
SS
14640 For V.4 we don't have the stack cushion that AIX uses, but assume
14641 that the debugger can handle stackless frames. */
ff381587
MM
14642
14643 if (info_ptr->calls_p)
14644 info_ptr->push_p = 1;
14645
178274da 14646 else if (DEFAULT_ABI == ABI_V4)
44688022 14647 info_ptr->push_p = non_fixed_size != 0;
ff381587 14648
178274da
AM
14649 else if (frame_pointer_needed)
14650 info_ptr->push_p = 1;
14651
14652 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14653 info_ptr->push_p = 1;
14654
ff381587 14655 else
44688022 14656 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 14657
a4f6c312 14658 /* Zero offsets if we're not saving those registers. */
8dda1a21 14659 if (info_ptr->fp_size == 0)
4697a36c
MM
14660 info_ptr->fp_save_offset = 0;
14661
8dda1a21 14662 if (info_ptr->gp_size == 0)
4697a36c
MM
14663 info_ptr->gp_save_offset = 0;
14664
00b960c7
AH
14665 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14666 info_ptr->altivec_save_offset = 0;
14667
14668 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14669 info_ptr->vrsave_save_offset = 0;
14670
c19de7aa
AH
14671 if (! TARGET_SPE_ABI
14672 || info_ptr->spe_64bit_regs_used == 0
14673 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
14674 info_ptr->spe_gp_save_offset = 0;
14675
c81fc13e 14676 if (! info_ptr->lr_save_p)
4697a36c
MM
14677 info_ptr->lr_save_offset = 0;
14678
c81fc13e 14679 if (! info_ptr->cr_save_p)
4697a36c
MM
14680 info_ptr->cr_save_offset = 0;
14681
14682 return info_ptr;
14683}
14684
c19de7aa
AH
14685/* Return true if the current function uses any GPRs in 64-bit SIMD
14686 mode. */
14687
14688static bool
863d938c 14689spe_func_has_64bit_regs_p (void)
c19de7aa
AH
14690{
14691 rtx insns, insn;
14692
14693 /* Functions that save and restore all the call-saved registers will
14694 need to save/restore the registers in 64-bits. */
14695 if (current_function_calls_eh_return
14696 || current_function_calls_setjmp
14697 || current_function_has_nonlocal_goto)
14698 return true;
14699
14700 insns = get_insns ();
14701
14702 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
14703 {
14704 if (INSN_P (insn))
14705 {
14706 rtx i;
14707
b5a5beb9
AH
14708 /* FIXME: This should be implemented with attributes...
14709
14710 (set_attr "spe64" "true")....then,
14711 if (get_spe64(insn)) return true;
14712
14713 It's the only reliable way to do the stuff below. */
14714
c19de7aa 14715 i = PATTERN (insn);
f82f556d
AH
14716 if (GET_CODE (i) == SET)
14717 {
14718 enum machine_mode mode = GET_MODE (SET_SRC (i));
14719
14720 if (SPE_VECTOR_MODE (mode))
14721 return true;
4d4447b5
PB
14722 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
14723 || mode == DDmode || mode == TDmode))
f82f556d
AH
14724 return true;
14725 }
c19de7aa
AH
14726 }
14727 }
14728
14729 return false;
14730}
14731
d1d0c603 14732static void
a2369ed3 14733debug_stack_info (rs6000_stack_t *info)
9878760c 14734{
d330fd93 14735 const char *abi_string;
24d304eb 14736
c81fc13e 14737 if (! info)
4697a36c
MM
14738 info = rs6000_stack_info ();
14739
14740 fprintf (stderr, "\nStack information for function %s:\n",
14741 ((current_function_decl && DECL_NAME (current_function_decl))
14742 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
14743 : "<unknown>"));
14744
24d304eb
RK
14745 switch (info->abi)
14746 {
b6c9286a
MM
14747 default: abi_string = "Unknown"; break;
14748 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 14749 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 14750 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 14751 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
14752 }
14753
14754 fprintf (stderr, "\tABI = %5s\n", abi_string);
14755
00b960c7
AH
14756 if (TARGET_ALTIVEC_ABI)
14757 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
14758
a3170dc6
AH
14759 if (TARGET_SPE_ABI)
14760 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
14761
4697a36c
MM
14762 if (info->first_gp_reg_save != 32)
14763 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
14764
14765 if (info->first_fp_reg_save != 64)
14766 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 14767
00b960c7
AH
14768 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
14769 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
14770 info->first_altivec_reg_save);
14771
4697a36c
MM
14772 if (info->lr_save_p)
14773 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 14774
4697a36c
MM
14775 if (info->cr_save_p)
14776 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
14777
00b960c7
AH
14778 if (info->vrsave_mask)
14779 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
14780
4697a36c
MM
14781 if (info->push_p)
14782 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
14783
14784 if (info->calls_p)
14785 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
14786
4697a36c
MM
14787 if (info->gp_save_offset)
14788 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
14789
14790 if (info->fp_save_offset)
14791 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
14792
00b960c7
AH
14793 if (info->altivec_save_offset)
14794 fprintf (stderr, "\taltivec_save_offset = %5d\n",
14795 info->altivec_save_offset);
14796
a3170dc6
AH
14797 if (info->spe_gp_save_offset)
14798 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
14799 info->spe_gp_save_offset);
14800
00b960c7
AH
14801 if (info->vrsave_save_offset)
14802 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
14803 info->vrsave_save_offset);
14804
4697a36c
MM
14805 if (info->lr_save_offset)
14806 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
14807
14808 if (info->cr_save_offset)
14809 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
14810
14811 if (info->varargs_save_offset)
14812 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
14813
14814 if (info->total_size)
d1d0c603
JJ
14815 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14816 info->total_size);
4697a36c 14817
4697a36c 14818 if (info->vars_size)
d1d0c603
JJ
14819 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14820 info->vars_size);
4697a36c
MM
14821
14822 if (info->parm_size)
14823 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
14824
14825 if (info->fixed_size)
14826 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
14827
14828 if (info->gp_size)
14829 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
14830
a3170dc6
AH
14831 if (info->spe_gp_size)
14832 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
14833
4697a36c
MM
14834 if (info->fp_size)
14835 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
14836
00b960c7
AH
14837 if (info->altivec_size)
14838 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
14839
14840 if (info->vrsave_size)
14841 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
14842
14843 if (info->altivec_padding_size)
14844 fprintf (stderr, "\taltivec_padding_size= %5d\n",
14845 info->altivec_padding_size);
14846
a3170dc6
AH
14847 if (info->spe_padding_size)
14848 fprintf (stderr, "\tspe_padding_size = %5d\n",
14849 info->spe_padding_size);
14850
4697a36c
MM
14851 if (info->cr_size)
14852 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
14853
14854 if (info->save_size)
14855 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
14856
14857 if (info->reg_size != 4)
14858 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
14859
14860 fprintf (stderr, "\n");
9878760c 14861}
71f123ca
FS
14862
14863rtx
a2369ed3 14864rs6000_return_addr (int count, rtx frame)
71f123ca 14865{
a4f6c312
SS
14866 /* Currently we don't optimize very well between prolog and body
14867 code and for PIC code the code can be actually quite bad, so
14868 don't try to be too clever here. */
f1384257 14869 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
14870 {
14871 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
14872
14873 return
14874 gen_rtx_MEM
14875 (Pmode,
14876 memory_address
14877 (Pmode,
14878 plus_constant (copy_to_reg
14879 (gen_rtx_MEM (Pmode,
14880 memory_address (Pmode, frame))),
14881 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
14882 }
14883
8c29550d 14884 cfun->machine->ra_need_lr = 1;
1de43f85 14885 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
14886}
14887
5e1bf043
DJ
14888/* Say whether a function is a candidate for sibcall handling or not.
14889 We do not allow indirect calls to be optimized into sibling calls.
14890 Also, we can't do it if there are any vector parameters; there's
14891 nowhere to put the VRsave code so that it works; note that functions with
14892 vector parameters are required to have a prototype, so the argument
14893 type info must be available here. (The tail recursion case can work
14894 with vector parameters, but there's no way to distinguish here.) */
4977bab6 14895static bool
a2369ed3 14896rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
14897{
14898 tree type;
4977bab6 14899 if (decl)
14900 {
14901 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 14902 {
4977bab6 14903 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
14904 type; type = TREE_CHAIN (type))
14905 {
c15b529f 14906 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 14907 return false;
5e1bf043 14908 }
c4ad648e 14909 }
5e1bf043 14910 if (DEFAULT_ABI == ABI_DARWIN
14911 || ((*targetm.binds_local_p) (decl)
14912 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 14913 {
4977bab6 14914 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
14915
14916 if (!lookup_attribute ("longcall", attr_list)
14917 || lookup_attribute ("shortcall", attr_list))
4977bab6 14918 return true;
2bcc50d0 14919 }
5e1bf043 14920 }
4977bab6 14921 return false;
14922}
14923
14924/* Return NULL if INSN is valid within a low-overhead loop.
14925 Otherwise return why doloop cannot be applied.
14926 PowerPC uses the COUNT register for branch on table instructions. */
14927
e7e64a25 14928static const char *
3101faab 14929rs6000_invalid_within_doloop (const_rtx insn)
14930{
14931 if (CALL_P (insn))
e7e64a25 14932 return "Function call in the loop.";
14933
14934 if (JUMP_P (insn)
14935 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
14936 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 14937 return "Computed branch in the loop.";
9419649c 14938
e7e64a25 14939 return NULL;
14940}
14941
71f123ca 14942static int
863d938c 14943rs6000_ra_ever_killed (void)
14944{
14945 rtx top;
14946 rtx reg;
14947 rtx insn;
71f123ca 14948
dd292d0a 14949 if (current_function_is_thunk)
71f123ca 14950 return 0;
eb0424da 14951
14952 /* regs_ever_live has LR marked as used if any sibcalls are present,
14953 but this should not force saving and restoring in the
14954 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 14955 clobbers LR, so that is inappropriate. */
36f7e964 14956
14957 /* Also, the prologue can generate a store into LR that
14958 doesn't really count, like this:
36f7e964 14959
14960 move LR->R0
14961 bcl to set PIC register
14962 move LR->R31
14963 move R0->LR
14964
14965 When we're called from the epilogue, we need to avoid counting
14966 this as a store. */
f676971a 14967
14968 push_topmost_sequence ();
14969 top = get_insns ();
14970 pop_topmost_sequence ();
1de43f85 14971 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 14972
14973 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
14974 {
14975 if (INSN_P (insn))
14976 {
14977 if (CALL_P (insn))
14978 {
14979 if (!SIBLING_CALL_P (insn))
14980 return 1;
14981 }
1de43f85 14982 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 14983 return 1;
14984 else if (set_of (reg, insn) != NULL_RTX
14985 && !prologue_epilogue_contains (insn))
14986 return 1;
14987 }
14988 }
14989 return 0;
71f123ca 14990}
4697a36c 14991\f
9ebbca7d 14992/* Emit instructions needed to load the TOC register.
c7ca610e 14993 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 14994 a constant pool; or for SVR4 -fpic. */
14995
14996void
a2369ed3 14997rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 14998{
6fb5fa3c 14999 rtx dest;
1db02437 15000 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 15001
7f970b70 15002 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 15003 {
7f970b70 15004 char buf[30];
e65a3857 15005 rtx lab, tmp1, tmp2, got;
15006
15007 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15008 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15009 if (flag_pic == 2)
15010 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
15011 else
15012 got = rs6000_got_sym ();
15013 tmp1 = tmp2 = dest;
15014 if (!fromprolog)
15015 {
15016 tmp1 = gen_reg_rtx (Pmode);
15017 tmp2 = gen_reg_rtx (Pmode);
15018 }
15019 emit_insn (gen_load_toc_v4_PIC_1 (lab));
15020 emit_move_insn (tmp1,
1de43f85 15021 gen_rtx_REG (Pmode, LR_REGNO));
15022 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
15023 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
15024 }
15025 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
15026 {
6fb5fa3c 15027 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 15028 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
15029 }
15030 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
15031 {
15032 char buf[30];
15033 rtx temp0 = (fromprolog
15034 ? gen_rtx_REG (Pmode, 0)
15035 : gen_reg_rtx (Pmode));
20b71b17 15036
15037 if (fromprolog)
15038 {
ccbca5e4 15039 rtx symF, symL;
38c1f2d7 15040
15041 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15042 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 15043
15044 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
15045 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15046
15047 emit_insn (gen_load_toc_v4_PIC_1 (symF));
15048 emit_move_insn (dest,
1de43f85 15049 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 15050 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
15051 }
15052 else
15053 {
15054 rtx tocsym;
15055
15056 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
15057 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
15058 emit_move_insn (dest,
1de43f85 15059 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 15060 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 15061 }
6fb5fa3c 15062 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 15063 }
15064 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
15065 {
15066 /* This is for AIX code running in non-PIC ELF32. */
15067 char buf[30];
15068 rtx realsym;
15069 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15070 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15071
15072 emit_insn (gen_elf_high (dest, realsym));
15073 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 15074 }
37409796 15075 else
9ebbca7d 15076 {
37409796 15077 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 15078
9ebbca7d 15079 if (TARGET_32BIT)
6fb5fa3c 15080 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 15081 else
6fb5fa3c 15082 emit_insn (gen_load_toc_aix_di (dest));
15083 }
15084}
15085
15086/* Emit instructions to restore the link register after determining where
15087 its value has been stored. */
15088
15089void
15090rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
15091{
15092 rs6000_stack_t *info = rs6000_stack_info ();
15093 rtx operands[2];
15094
15095 operands[0] = source;
15096 operands[1] = scratch;
15097
15098 if (info->lr_save_p)
15099 {
15100 rtx frame_rtx = stack_pointer_rtx;
15101 HOST_WIDE_INT sp_offset = 0;
15102 rtx tmp;
15103
15104 if (frame_pointer_needed
15105 || current_function_calls_alloca
15106 || info->total_size > 32767)
15107 {
0be76840 15108 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 15109 emit_move_insn (operands[1], tmp);
15110 frame_rtx = operands[1];
15111 }
15112 else if (info->push_p)
15113 sp_offset = info->total_size;
15114
15115 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 15116 tmp = gen_frame_mem (Pmode, tmp);
15117 emit_move_insn (tmp, operands[0]);
15118 }
15119 else
1de43f85 15120 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
15121}
15122
4862826d 15123static GTY(()) alias_set_type set = -1;
f103e34d 15124
4862826d 15125alias_set_type
863d938c 15126get_TOC_alias_set (void)
9ebbca7d 15127{
15128 if (set == -1)
15129 set = new_alias_set ();
15130 return set;
f676971a 15131}
9ebbca7d 15132
c1207243 15133/* This returns nonzero if the current function uses the TOC. This is
15134 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
15135 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 15136#if TARGET_ELF
3c9eb5f4 15137static int
f676971a 15138uses_TOC (void)
9ebbca7d 15139{
c4501e62 15140 rtx insn;
38c1f2d7 15141
15142 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
15143 if (INSN_P (insn))
15144 {
15145 rtx pat = PATTERN (insn);
15146 int i;
9ebbca7d 15147
f676971a 15148 if (GET_CODE (pat) == PARALLEL)
15149 for (i = 0; i < XVECLEN (pat, 0); i++)
15150 {
15151 rtx sub = XVECEXP (pat, 0, i);
15152 if (GET_CODE (sub) == USE)
15153 {
15154 sub = XEXP (sub, 0);
15155 if (GET_CODE (sub) == UNSPEC
15156 && XINT (sub, 1) == UNSPEC_TOC)
15157 return 1;
15158 }
15159 }
15160 }
15161 return 0;
9ebbca7d 15162}
c954844a 15163#endif
38c1f2d7 15164
9ebbca7d 15165rtx
f676971a 15166create_TOC_reference (rtx symbol)
9ebbca7d 15167{
b3a13419 15168 if (!can_create_pseudo_p ())
6fb5fa3c 15169 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 15170 return gen_rtx_PLUS (Pmode,
a8a05998 15171 gen_rtx_REG (Pmode, TOC_REGISTER),
15172 gen_rtx_CONST (Pmode,
15173 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 15174 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 15175}
38c1f2d7 15176
15177/* If _Unwind_* has been called from within the same module,
15178 toc register is not guaranteed to be saved to 40(1) on function
15179 entry. Save it there in that case. */
c7ca610e 15180
9ebbca7d 15181void
863d938c 15182rs6000_aix_emit_builtin_unwind_init (void)
15183{
15184 rtx mem;
15185 rtx stack_top = gen_reg_rtx (Pmode);
15186 rtx opcode_addr = gen_reg_rtx (Pmode);
15187 rtx opcode = gen_reg_rtx (SImode);
15188 rtx tocompare = gen_reg_rtx (SImode);
15189 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 15190
8308679f 15191 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
15192 emit_move_insn (stack_top, mem);
15193
15194 mem = gen_frame_mem (Pmode,
15195 gen_rtx_PLUS (Pmode, stack_top,
15196 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 15197 emit_move_insn (opcode_addr, mem);
15198 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
15199 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 15200 : 0xE8410028, SImode));
9ebbca7d 15201
fc4767bb 15202 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 15203 SImode, NULL_RTX, NULL_RTX,
fc4767bb 15204 no_toc_save_needed);
9ebbca7d 15205
15206 mem = gen_frame_mem (Pmode,
15207 gen_rtx_PLUS (Pmode, stack_top,
15208 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
15209 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
15210 emit_label (no_toc_save_needed);
9ebbca7d 15211}
38c1f2d7 15212\f
15213/* This ties together stack memory (MEM with an alias set of frame_alias_set)
15214 and the change to the stack pointer. */
ba4828e0 15215
9ebbca7d 15216static void
863d938c 15217rs6000_emit_stack_tie (void)
9ebbca7d 15218{
15219 rtx mem = gen_frame_mem (BLKmode,
15220 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 15221
15222 emit_insn (gen_stack_tie (mem));
15223}
38c1f2d7 15224
15225/* Emit the correct code for allocating stack space, as insns.
15226 If COPY_R12, make sure a copy of the old frame is left in r12.
15227 The generated code may use hard register 0 as a temporary. */
15228
15229static void
a2369ed3 15230rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
38c1f2d7 15231{
15232 rtx insn;
15233 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15234 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
15235 rtx todec = gen_int_mode (-size, Pmode);
15236
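  /* gen_int_mode truncates -SIZE to Pmode; if the value does not survive
     the round trip the frame is too large to address, so warn and trap
     rather than allocate a wrong-sized frame.  */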
15237 if (INTVAL (todec) != -size)
15238 {
d4ee4d25 15239 warning (0, "stack frame too large");
15240 emit_insn (gen_trap ());
15241 return;
15242 }
15243
15244 if (current_function_limit_stack)
15245 {
15246 if (REG_P (stack_limit_rtx)
f676971a 15247 && REGNO (stack_limit_rtx) > 1
15248 && REGNO (stack_limit_rtx) <= 31)
15249 {
5b71a4e7 15250 emit_insn (TARGET_32BIT
15251 ? gen_addsi3 (tmp_reg,
15252 stack_limit_rtx,
15253 GEN_INT (size))
15254 : gen_adddi3 (tmp_reg,
15255 stack_limit_rtx,
15256 GEN_INT (size)));
5b71a4e7 15257
15258 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15259 const0_rtx));
15260 }
15261 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 15262 && TARGET_32BIT
f607bc57 15263 && DEFAULT_ABI == ABI_V4)
a157febd 15264 {
9ebbca7d 15265 rtx toload = gen_rtx_CONST (VOIDmode,
15266 gen_rtx_PLUS (Pmode,
15267 stack_limit_rtx,
9ebbca7d 15268 GEN_INT (size)));
5b71a4e7 15269
15270 emit_insn (gen_elf_high (tmp_reg, toload));
15271 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
15272 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15273 const0_rtx));
15274 }
15275 else
d4ee4d25 15276 warning (0, "stack limit expression is not supported");
15277 }
15278
15279 if (copy_r12 || ! TARGET_UPDATE)
15280 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
15281
15282 if (TARGET_UPDATE)
15283 {
9ebbca7d 15284 if (size > 32767)
38c1f2d7 15285 {
9ebbca7d 15286 /* Need a note here so that try_split doesn't get confused. */
9390387d 15287 if (get_last_insn () == NULL_RTX)
2e040219 15288 emit_note (NOTE_INSN_DELETED);
15289 insn = emit_move_insn (tmp_reg, todec);
15290 try_split (PATTERN (insn), insn, 0);
15291 todec = tmp_reg;
38c1f2d7 15292 }
15293
15294 insn = emit_insn (TARGET_32BIT
15295 ? gen_movsi_update (stack_reg, stack_reg,
15296 todec, stack_reg)
c4ad648e 15297 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 15298 todec, stack_reg));
15299 }
15300 else
15301 {
15302 insn = emit_insn (TARGET_32BIT
15303 ? gen_addsi3 (stack_reg, stack_reg, todec)
15304 : gen_adddi3 (stack_reg, stack_reg, todec));
15305 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
15306 gen_rtx_REG (Pmode, 12));
15307 }
f676971a 15308
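  /* Whether the adjustment above used a store-with-update or a plain add,
     attach the canonical sp = sp - size form so the unwinder sees a single
     frame-related expression.  */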
9ebbca7d 15309 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 15310 REG_NOTES (insn) =
9ebbca7d 15311 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 15312 gen_rtx_SET (VOIDmode, stack_reg,
15313 gen_rtx_PLUS (Pmode, stack_reg,
15314 GEN_INT (-size))),
15315 REG_NOTES (insn));
15316}
15317
15318/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
15319 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
15320 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
15321 deduce these equivalences by itself so it wasn't necessary to hold
15322 its hand so much. */
15323
15324static void
f676971a 15325rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 15326 rtx reg2, rtx rreg)
15327{
15328 rtx real, temp;
15329
15330 /* copy_rtx will not make unique copies of registers, so we need to
15331 ensure we don't have unwanted sharing here. */
15332 if (reg == reg2)
15333 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15334
15335 if (reg == rreg)
15336 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15337
15338 real = copy_rtx (PATTERN (insn));
15339
15340 if (reg2 != NULL_RTX)
15341 real = replace_rtx (real, reg2, rreg);
15342
15343 real = replace_rtx (real, reg,
15344 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
15345 STACK_POINTER_REGNUM),
15346 GEN_INT (val)));
f676971a 15347
15348 /* We expect that 'real' is either a SET or a PARALLEL containing
15349 SETs (and possibly other stuff). In a PARALLEL, all the SETs
15350 are important so they all have to be marked RTX_FRAME_RELATED_P. */
15351
15352 if (GET_CODE (real) == SET)
15353 {
15354 rtx set = real;
f676971a 15355
15356 temp = simplify_rtx (SET_SRC (set));
15357 if (temp)
15358 SET_SRC (set) = temp;
15359 temp = simplify_rtx (SET_DEST (set));
15360 if (temp)
15361 SET_DEST (set) = temp;
15362 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 15363 {
9ebbca7d
GK
15364 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15365 if (temp)
15366 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 15367 }
38c1f2d7 15368 }
37409796 15369 else
15370 {
15371 int i;
15372
15373 gcc_assert (GET_CODE (real) == PARALLEL);
15374 for (i = 0; i < XVECLEN (real, 0); i++)
15375 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
15376 {
15377 rtx set = XVECEXP (real, 0, i);
f676971a 15378
15379 temp = simplify_rtx (SET_SRC (set));
15380 if (temp)
15381 SET_SRC (set) = temp;
15382 temp = simplify_rtx (SET_DEST (set));
15383 if (temp)
15384 SET_DEST (set) = temp;
15385 if (GET_CODE (SET_DEST (set)) == MEM)
15386 {
15387 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15388 if (temp)
15389 XEXP (SET_DEST (set), 0) = temp;
15390 }
15391 RTX_FRAME_RELATED_P (set) = 1;
15392 }
15393 }
c19de7aa 15394
15395 RTX_FRAME_RELATED_P (insn) = 1;
15396 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15397 real,
15398 REG_NOTES (insn));
15399}
15400
15401/* Returns an insn that has a vrsave set operation with the
15402 appropriate CLOBBERs. */
15403
15404static rtx
a2369ed3 15405generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
15406{
15407 int nclobs, i;
15408 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 15409 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 15410
15411 clobs[0]
15412 = gen_rtx_SET (VOIDmode,
15413 vrsave,
15414 gen_rtx_UNSPEC_VOLATILE (SImode,
15415 gen_rtvec (2, reg, vrsave),
3aca4bff 15416 UNSPECV_SET_VRSAVE));
15417
15418 nclobs = 1;
15419
15420 /* We need to clobber the registers in the mask so the scheduler
15421 does not move sets to VRSAVE before sets of AltiVec registers.
15422
15423 However, if the function receives nonlocal gotos, reload will set
15424 all call saved registers live. We will end up with:
15425
15426 (set (reg 999) (mem))
15427 (parallel [ (set (reg vrsave) (unspec blah))
15428 (clobber (reg 999))])
15429
15430 The clobber will cause the store into reg 999 to be dead, and
15431 flow will attempt to delete an epilogue insn. In this case, we
15432 need an unspec use/set of the register. */
15433
15434 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 15435 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15436 {
15437 if (!epiloguep || call_used_regs [i])
15438 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15439 gen_rtx_REG (V4SImode, i));
15440 else
15441 {
15442 rtx reg = gen_rtx_REG (V4SImode, i);
15443
15444 clobs[nclobs++]
15445 = gen_rtx_SET (VOIDmode,
15446 reg,
15447 gen_rtx_UNSPEC (V4SImode,
15448 gen_rtvec (1, reg), 27));
15449 }
15450 }
15451
15452 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15453
15454 for (i = 0; i < nclobs; ++i)
15455 XVECEXP (insn, 0, i) = clobs[i];
15456
15457 return insn;
15458}
15459
15460/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15461 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
15462
15463static void
f676971a 15464emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 15465 unsigned int regno, int offset, HOST_WIDE_INT total_size)
15466{
15467 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15468 rtx replacea, replaceb;
15469
15470 int_rtx = GEN_INT (offset);
15471
15472 /* Some cases that need register indexed addressing. */
15473 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4d4447b5 15474 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == DDmode))
15475 || (TARGET_SPE_ABI
15476 && SPE_VECTOR_MODE (mode)
15477 && !SPE_CONST_OFFSET_OK (offset)))
15478 {
15479 /* Whoever calls us must make sure r11 is available in the
c4ad648e 15480 flow path of instructions in the prologue. */
15481 offset_rtx = gen_rtx_REG (Pmode, 11);
15482 emit_move_insn (offset_rtx, int_rtx);
15483
15484 replacea = offset_rtx;
15485 replaceb = int_rtx;
15486 }
15487 else
15488 {
15489 offset_rtx = int_rtx;
15490 replacea = NULL_RTX;
15491 replaceb = NULL_RTX;
15492 }
15493
15494 reg = gen_rtx_REG (mode, regno);
15495 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 15496 mem = gen_frame_mem (mode, addr);
15497
15498 insn = emit_move_insn (mem, reg);
15499
15500 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15501}
15502
15503/* Emit an offset memory reference suitable for a frame store, while
15504 converting to a valid addressing mode. */
15505
15506static rtx
a2369ed3 15507gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
15508{
15509 rtx int_rtx, offset_rtx;
15510
15511 int_rtx = GEN_INT (offset);
15512
4d4cbc0e 15513 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
4d4447b5 15514 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == DDmode)))
15515 {
15516 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15517 emit_move_insn (offset_rtx, int_rtx);
15518 }
15519 else
15520 offset_rtx = int_rtx;
15521
0be76840 15522 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
15523}
15524
15525/* Look for user-defined global regs. We should not save and restore these,
15526 and cannot use stmw/lmw if there are any in its range. */
15527
15528static bool
15529no_global_regs_above (int first_greg)
15530{
15531 int i;
15532 for (i = 0; i < 32 - first_greg; i++)
15533 if (global_regs[first_greg + i])
15534 return false;
15535 return true;
15536}
15537
15538#ifndef TARGET_FIX_AND_CONTINUE
15539#define TARGET_FIX_AND_CONTINUE 0
15540#endif
15541
15542/* Determine whether the gp REG is really used. */
15543
15544static bool
15545rs6000_reg_live_or_pic_offset_p (int reg)
15546{
6fb5fa3c 15547 return ((df_regs_ever_live_p (reg)
15548 && (!call_used_regs[reg]
15549 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15550 && TARGET_TOC && TARGET_MINIMAL_TOC)))
15551 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15552 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
15553 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
15554}
15555
15556/* Emit function prologue as insns. */
15557
9878760c 15558void
863d938c 15559rs6000_emit_prologue (void)
9878760c 15560{
4697a36c 15561 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 15562 enum machine_mode reg_mode = Pmode;
327e5343 15563 int reg_size = TARGET_32BIT ? 4 : 8;
15564 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15565 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
15566 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 15567 rtx cr_save_rtx = NULL_RTX;
15568 rtx insn;
15569 int saving_FPRs_inline;
15570 int using_store_multiple;
15571 HOST_WIDE_INT sp_offset = 0;
f676971a 15572
15573 if (TARGET_FIX_AND_CONTINUE)
15574 {
15575 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 15576 address by modifying the first 5 instructions of the function
15577 to branch to the overriding function. This is necessary to
15578 permit function pointers that point to the old function to
15579 actually forward to the new function. */
15580 emit_insn (gen_nop ());
15581 emit_insn (gen_nop ());
de2ab0ca 15582 emit_insn (gen_nop ());
15583 emit_insn (gen_nop ());
15584 emit_insn (gen_nop ());
15585 }
15586
15587 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
15588 {
15589 reg_mode = V2SImode;
15590 reg_size = 8;
15591 }
a3170dc6 15592
9ebbca7d 15593 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
15594 && (!TARGET_SPE_ABI
15595 || info->spe_64bit_regs_used == 0)
15596 && info->first_gp_reg_save < 31
15597 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 15598 saving_FPRs_inline = (info->first_fp_reg_save == 64
8c29550d 15599 || FP_SAVE_INLINE (info->first_fp_reg_save)
acd0b319 15600 || current_function_calls_eh_return
8c29550d 15601 || cfun->machine->ra_need_lr);
15602
15603 /* For V.4, update stack before we do any saving and set back pointer. */
15604 if (! WORLD_SAVE_P (info)
15605 && info->push_p
15606 && (DEFAULT_ABI == ABI_V4
15607 || current_function_calls_eh_return))
15608 {
15609 if (info->total_size < 32767)
15610 sp_offset = info->total_size;
15611 else
15612 frame_reg_rtx = frame_ptr_rtx;
f676971a 15613 rs6000_emit_allocate_stack (info->total_size,
15614 (frame_reg_rtx != sp_reg_rtx
15615 && (info->cr_save_p
15616 || info->lr_save_p
15617 || info->first_fp_reg_save < 64
15618 || info->first_gp_reg_save < 32
15619 )));
15620 if (frame_reg_rtx != sp_reg_rtx)
15621 rs6000_emit_stack_tie ();
15622 }
15623
d62294f5 15624 /* Handle world saves specially here. */
f57fe068 15625 if (WORLD_SAVE_P (info))
15626 {
15627 int i, j, sz;
15628 rtx treg;
15629 rtvec p;
22fa69da 15630 rtx reg0;
d62294f5
FJ
15631
15632 /* save_world expects lr in r0. */
22fa69da 15633 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 15634 if (info->lr_save_p)
c4ad648e 15635 {
22fa69da 15636 insn = emit_move_insn (reg0,
1de43f85 15637 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
15638 RTX_FRAME_RELATED_P (insn) = 1;
15639 }
d62294f5
FJ
15640
15641 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 15642 assumptions about the offsets of various bits of the stack
992d08b1 15643 frame. */
37409796
NS
15644 gcc_assert (info->gp_save_offset == -220
15645 && info->fp_save_offset == -144
15646 && info->lr_save_offset == 8
15647 && info->cr_save_offset == 4
15648 && info->push_p
15649 && info->lr_save_p
15650 && (!current_function_calls_eh_return
15651 || info->ehrd_offset == -432)
15652 && info->vrsave_save_offset == -224
22fa69da 15653 && info->altivec_save_offset == -416);
d62294f5
FJ
15654
15655 treg = gen_rtx_REG (SImode, 11);
15656 emit_move_insn (treg, GEN_INT (-info->total_size));
15657
15658 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 15659 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
15660
15661 /* Preserve CR2 for save_world prologues */
22fa69da 15662 sz = 5;
d62294f5
FJ
15663 sz += 32 - info->first_gp_reg_save;
15664 sz += 64 - info->first_fp_reg_save;
15665 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
15666 p = rtvec_alloc (sz);
15667 j = 0;
15668 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 15669 gen_rtx_REG (SImode,
1de43f85 15670 LR_REGNO));
d62294f5 15671 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
15672 gen_rtx_SYMBOL_REF (Pmode,
15673 "*save_world"));
d62294f5 15674 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
15675 properly. */
15676 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15677 {
15678 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15679 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15680 GEN_INT (info->fp_save_offset
15681 + sp_offset + 8 * i));
0be76840 15682 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
15683
15684 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15685 }
d62294f5 15686 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
15687 {
15688 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
15689 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15690 GEN_INT (info->altivec_save_offset
15691 + sp_offset + 16 * i));
0be76840 15692 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
15693
15694 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15695 }
d62294f5 15696 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
15697 {
15698 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15699 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15700 GEN_INT (info->gp_save_offset
15701 + sp_offset + reg_size * i));
0be76840 15702 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15703
15704 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15705 }
15706
15707 {
15708 /* CR register traditionally saved as CR2. */
15709 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
15710 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15711 GEN_INT (info->cr_save_offset
15712 + sp_offset));
0be76840 15713 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15714
15715 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15716 }
15717 /* Explain about use of R0. */
15718 if (info->lr_save_p)
15719 {
15720 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15721 GEN_INT (info->lr_save_offset
15722 + sp_offset));
15723 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 15724
15725 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
15726 }
15727 /* Explain what happens to the stack pointer. */
15728 {
15729 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
15730 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
15731 }
d62294f5
FJ
15732
15733 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
15734 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15735 treg, GEN_INT (-info->total_size));
15736 sp_offset = info->total_size;
d62294f5
FJ
15737 }
15738
9ebbca7d 15739 /* If we use the link register, get it into r0. */
f57fe068 15740 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 15741 {
15742 rtx addr, reg, mem;
15743
f8a57be8 15744 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 15745 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 15746 RTX_FRAME_RELATED_P (insn) = 1;
15747
15748 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15749 GEN_INT (info->lr_save_offset + sp_offset));
15750 reg = gen_rtx_REG (Pmode, 0);
15751 mem = gen_rtx_MEM (Pmode, addr);
15752 /* This should not be of rs6000_sr_alias_set, because of
15753 __builtin_return_address. */
15754
15755 insn = emit_move_insn (mem, reg);
15756 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15757 NULL_RTX, NULL_RTX);
f8a57be8 15758 }
9ebbca7d
GK
15759
15760 /* If we need to save CR, put it into r12. */
f57fe068 15761 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 15762 {
f8a57be8 15763 rtx set;
f676971a 15764
9ebbca7d 15765 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
15766 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15767 RTX_FRAME_RELATED_P (insn) = 1;
15768 /* Now, there's no way that dwarf2out_frame_debug_expr is going
15769 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
15770 But that's OK. All we have to do is specify that _one_ condition
15771 code register is saved in this stack slot. The thrower's epilogue
15772 will then restore all the call-saved registers.
15773 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
15774 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
15775 gen_rtx_REG (SImode, CR2_REGNO));
15776 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15777 set,
15778 REG_NOTES (insn));
9ebbca7d
GK
15779 }
15780
a4f6c312
SS
15781 /* Do any required saving of fpr's. If only one or two to save, do
15782 it ourselves. Otherwise, call function. */
f57fe068 15783 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
15784 {
15785 int i;
15786 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 15787 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 15788 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
15789 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
15790 info->first_fp_reg_save + i,
15791 info->fp_save_offset + sp_offset + 8 * i,
15792 info->total_size);
9ebbca7d 15793 }
f57fe068 15794 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
9ebbca7d
GK
15795 {
15796 int i;
15797 char rname[30];
520a57c8 15798 const char *alloc_rname;
9ebbca7d
GK
15799 rtvec p;
15800 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
f676971a
EC
15801
15802 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
15803 gen_rtx_REG (Pmode,
1de43f85 15804 LR_REGNO));
9ebbca7d
GK
15805 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
15806 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
a8a05998 15807 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
15808 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15809 gen_rtx_SYMBOL_REF (Pmode,
15810 alloc_rname));
15811 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15812 {
15813 rtx addr, reg, mem;
15814 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15815 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a 15816 GEN_INT (info->fp_save_offset
9ebbca7d 15817 + sp_offset + 8*i));
0be76840 15818 mem = gen_frame_mem (DFmode, addr);
9ebbca7d
GK
15819
15820 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
15821 }
15822 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15823 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
15824 NULL_RTX, NULL_RTX);
15825 }
b6c9286a 15826
9ebbca7d
GK
15827 /* Save GPRs. This is done as a PARALLEL if we are using
15828 the store-multiple instructions. */
f57fe068 15829 if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 15830 {
308c142a 15831 rtvec p;
9ebbca7d
GK
15832 int i;
15833 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
15834 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15835 {
15836 rtx addr, reg, mem;
15837 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
15838 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15839 GEN_INT (info->gp_save_offset
15840 + sp_offset
9ebbca7d 15841 + reg_size * i));
0be76840 15842 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
15843
15844 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
15845 }
15846 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15847 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 15848 NULL_RTX, NULL_RTX);
b6c9286a 15849 }
52ff33d0
NF
15850 else if (!WORLD_SAVE_P (info)
15851 && TARGET_SPE_ABI
15852 && info->spe_64bit_regs_used != 0
15853 && info->first_gp_reg_save != 32)
15854 {
15855 int i;
15856 rtx spe_save_area_ptr;
15857 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
6fb5fa3c 15858 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
52ff33d0
NF
15859 && !call_used_regs[STATIC_CHAIN_REGNUM]);
15860
15861 /* Determine whether we can address all of the registers that need
15862 to be saved with an offset from the stack pointer that fits in
15863 the small const field for SPE memory instructions. */
15864 int spe_regs_addressable_via_sp
15865 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
15866 + (32 - info->first_gp_reg_save - 1) * reg_size);
15867 int spe_offset;
15868
15869 if (spe_regs_addressable_via_sp)
15870 {
30895f30 15871 spe_save_area_ptr = frame_reg_rtx;
52ff33d0
NF
15872 spe_offset = info->spe_gp_save_offset + sp_offset;
15873 }
15874 else
15875 {
15876 /* Make r11 point to the start of the SPE save area. We need
15877 to be careful here if r11 is holding the static chain. If
15878 it is, then temporarily save it in r0. We would use r0 as
15879 our base register here, but using r0 as a base register in
15880 loads and stores means something different from what we
15881 would like. */
15882 if (using_static_chain_p)
15883 {
15884 rtx r0 = gen_rtx_REG (Pmode, 0);
15885
15886 gcc_assert (info->first_gp_reg_save > 11);
15887
15888 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
15889 }
15890
15891 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
30895f30 15892 emit_insn (gen_addsi3 (spe_save_area_ptr, frame_reg_rtx,
52ff33d0
NF
15893 GEN_INT (info->spe_gp_save_offset + sp_offset)));
15894
15895 spe_offset = 0;
15896 }
15897
15898 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15899 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15900 {
15901 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15902 rtx offset, addr, mem;
15903
15904 /* We're doing all this to ensure that the offset fits into
15905 the immediate offset of 'evstdd'. */
15906 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
15907
15908 offset = GEN_INT (reg_size * i + spe_offset);
15909 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
15910 mem = gen_rtx_MEM (V2SImode, addr);
15911
15912 insn = emit_move_insn (mem, reg);
15913
15914 rs6000_frame_related (insn, spe_save_area_ptr,
15915 info->spe_gp_save_offset
15916 + sp_offset + reg_size * i,
15917 offset, const0_rtx);
15918 }
15919
15920 /* Move the static chain pointer back. */
15921 if (using_static_chain_p && !spe_regs_addressable_via_sp)
15922 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
15923 }
f57fe068 15924 else if (!WORLD_SAVE_P (info))
b6c9286a 15925 {
9ebbca7d
GK
15926 int i;
15927 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
15928 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15929 {
15930 rtx addr, reg, mem;
15931 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 15932
52ff33d0
NF
15933 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15934 GEN_INT (info->gp_save_offset
15935 + sp_offset
15936 + reg_size * i));
15937 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 15938
52ff33d0
NF
15939 insn = emit_move_insn (mem, reg);
15940 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15941 NULL_RTX, NULL_RTX);
15942 }
9ebbca7d
GK
15943 }
15944
83720594
RH
15945 /* ??? There's no need to emit actual instructions here, but it's the
15946 easiest way to get the frame unwind information emitted. */
22fa69da 15947 if (current_function_calls_eh_return)
83720594 15948 {
78e1b90d
DE
15949 unsigned int i, regno;
15950
fc4767bb
JJ
15951 /* In AIX ABI we need to pretend we save r2 here. */
15952 if (TARGET_AIX)
15953 {
15954 rtx addr, reg, mem;
15955
15956 reg = gen_rtx_REG (reg_mode, 2);
15957 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15958 GEN_INT (sp_offset + 5 * reg_size));
0be76840 15959 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
15960
15961 insn = emit_move_insn (mem, reg);
f676971a 15962 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
15963 NULL_RTX, NULL_RTX);
15964 PATTERN (insn) = gen_blockage ();
15965 }
15966
83720594
RH
15967 for (i = 0; ; ++i)
15968 {
83720594
RH
15969 regno = EH_RETURN_DATA_REGNO (i);
15970 if (regno == INVALID_REGNUM)
15971 break;
15972
89e7058f
AH
15973 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
15974 info->ehrd_offset + sp_offset
15975 + reg_size * (int) i,
15976 info->total_size);
83720594
RH
15977 }
15978 }
15979
9ebbca7d 15980 /* Save CR if we use any that must be preserved. */
f57fe068 15981 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
15982 {
15983 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15984 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 15985 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
15986 /* See the large comment above about why CR2_REGNO is used. */
15987 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 15988
9ebbca7d
GK
15989 /* If r12 was used to hold the original sp, copy cr into r0 now
15990 that it's free. */
15991 if (REGNO (frame_reg_rtx) == 12)
15992 {
f8a57be8
GK
15993 rtx set;
15994
9ebbca7d 15995 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
15996 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15997 RTX_FRAME_RELATED_P (insn) = 1;
15998 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
15999 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
16000 set,
16001 REG_NOTES (insn));
f676971a 16002
9ebbca7d
GK
16003 }
16004 insn = emit_move_insn (mem, cr_save_rtx);
16005
f676971a 16006 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 16007 NULL_RTX, NULL_RTX);
9ebbca7d
GK
16008 }
16009
f676971a 16010 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 16011 for which it was done previously. */
f57fe068 16012 if (!WORLD_SAVE_P (info) && info->push_p
fc4767bb 16013 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
2b2c2fe5 16014 {
bcb2d701 16015 if (info->total_size < 32767)
2b2c2fe5 16016 sp_offset = info->total_size;
bcb2d701
EC
16017 else
16018 frame_reg_rtx = frame_ptr_rtx;
16019 rs6000_emit_allocate_stack (info->total_size,
16020 (frame_reg_rtx != sp_reg_rtx
16021 && ((info->altivec_size != 0)
16022 || (info->vrsave_mask != 0)
16023 )));
16024 if (frame_reg_rtx != sp_reg_rtx)
16025 rs6000_emit_stack_tie ();
2b2c2fe5 16026 }
9ebbca7d
GK
16027
16028 /* Set frame pointer, if needed. */
16029 if (frame_pointer_needed)
16030 {
7d5175e1 16031 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
16032 sp_reg_rtx);
16033 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 16034 }
9878760c 16035
2b2c2fe5
EC
16036 /* Save AltiVec registers if needed. Save here because the red zone does
16037 not include AltiVec registers. */
16038 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16039 {
16040 int i;
16041
16042 /* There should be a non inline version of this, for when we
16043 are saving lots of vector registers. */
16044 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16045 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16046 {
16047 rtx areg, savereg, mem;
16048 int offset;
16049
16050 offset = info->altivec_save_offset + sp_offset
16051 + 16 * (i - info->first_altivec_reg_save);
16052
16053 savereg = gen_rtx_REG (V4SImode, i);
16054
16055 areg = gen_rtx_REG (Pmode, 0);
16056 emit_move_insn (areg, GEN_INT (offset));
16057
16058 /* AltiVec addressing mode is [reg+reg]. */
16059 mem = gen_frame_mem (V4SImode,
16060 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
16061
16062 insn = emit_move_insn (mem, savereg);
16063
16064 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16065 areg, GEN_INT (offset));
16066 }
16067 }
16068
16069 /* VRSAVE is a bit vector representing which AltiVec registers
16070 are used. The OS uses this to determine which vector
16071 registers to save on a context switch. We need to save
16072 VRSAVE on the stack frame, add whatever AltiVec registers we
16073 used in this function, and do the corresponding magic in the
16074 epilogue. */
16075
16076 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16077 && info->vrsave_mask != 0)
16078 {
16079 rtx reg, mem, vrsave;
16080 int offset;
16081
16082 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
16083 as frame_reg_rtx and r11 as the static chain pointer for
16084 nested functions. */
16085 reg = gen_rtx_REG (SImode, 0);
16086 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
16087 if (TARGET_MACHO)
16088 emit_insn (gen_get_vrsave_internal (reg));
16089 else
16090 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
16091
16092 if (!WORLD_SAVE_P (info))
16093 {
16094 /* Save VRSAVE. */
16095 offset = info->vrsave_save_offset + sp_offset;
16096 mem = gen_frame_mem (SImode,
16097 gen_rtx_PLUS (Pmode, frame_reg_rtx,
16098 GEN_INT (offset)));
16099 insn = emit_move_insn (mem, reg);
16100 }
16101
16102 /* Include the registers in the mask. */
16103 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
16104
16105 insn = emit_insn (generate_set_vrsave (reg, info, 0));
16106 }
16107
1db02437 16108 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 16109 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
16110 || (DEFAULT_ABI == ABI_V4
16111 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 16112 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
16113 {
16114 /* If emit_load_toc_table will use the link register, we need to save
16115 it. We use R12 for this purpose because emit_load_toc_table
16116 can use register 0. This allows us to use a plain 'blr' to return
16117 from the procedure more often. */
16118 int save_LR_around_toc_setup = (TARGET_ELF
16119 && DEFAULT_ABI != ABI_AIX
16120 && flag_pic
16121 && ! info->lr_save_p
16122 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
16123 if (save_LR_around_toc_setup)
16124 {
1de43f85 16125 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 16126
c4ad648e 16127 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 16128 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 16129
c4ad648e 16130 rs6000_emit_load_toc_table (TRUE);
f8a57be8 16131
c4ad648e 16132 insn = emit_move_insn (lr, frame_ptr_rtx);
16133 RTX_FRAME_RELATED_P (insn) = 1;
16134 }
16135 else
16136 rs6000_emit_load_toc_table (TRUE);
16137 }
ee890fe2 16138
fcce224d 16139#if TARGET_MACHO
16140 if (DEFAULT_ABI == ABI_DARWIN
16141 && flag_pic && current_function_uses_pic_offset_table)
16142 {
1de43f85 16143 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
11abc112 16144 rtx src = machopic_function_base_sym ();
ee890fe2 16145
16146 /* Save and restore LR locally around this call (in R0). */
16147 if (!info->lr_save_p)
6fb5fa3c 16148 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 16149
6fb5fa3c 16150 emit_insn (gen_load_macho_picbase (src));
ee890fe2 16151
16152 emit_move_insn (gen_rtx_REG (Pmode,
16153 RS6000_PIC_OFFSET_TABLE_REGNUM),
16154 lr);
16155
16156 if (!info->lr_save_p)
6fb5fa3c 16157 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 16158 }
fcce224d 16159#endif
16160}
16161
9ebbca7d 16162/* Write function prologue. */
a4f6c312 16163
08c148a8 16164static void
f676971a 16165rs6000_output_function_prologue (FILE *file,
a2369ed3 16166 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
16167{
16168 rs6000_stack_t *info = rs6000_stack_info ();
16169
16170 if (TARGET_DEBUG_STACK)
16171 debug_stack_info (info);
9878760c 16172
16173 /* Write .extern for any function we will call to save and restore
16174 fp values. */
16175 if (info->first_fp_reg_save < 64
16176 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 16177 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 16178 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
16179 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
16180 RESTORE_FP_SUFFIX);
9878760c 16181
16182 /* Write .extern for AIX common mode routines, if needed. */
16183 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
16184 {
16185 fputs ("\t.extern __mulh\n", file);
16186 fputs ("\t.extern __mull\n", file);
16187 fputs ("\t.extern __divss\n", file);
16188 fputs ("\t.extern __divus\n", file);
16189 fputs ("\t.extern __quoss\n", file);
16190 fputs ("\t.extern __quous\n", file);
16191 common_mode_defined = 1;
16192 }
9878760c 16193
9ebbca7d 16194 if (! HAVE_prologue)
979721f8 16195 {
9ebbca7d 16196 start_sequence ();
9dda4cc8 16197
16198 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
16199 the "toplevel" insn chain. */
2e040219 16200 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16201 rs6000_emit_prologue ();
2e040219 16202 emit_note (NOTE_INSN_DELETED);
178c3eff 16203
a3c9585f 16204 /* Expand INSN_ADDRESSES so final() doesn't crash. */
16205 {
16206 rtx insn;
16207 unsigned addr = 0;
16208 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16209 {
16210 INSN_ADDRESSES_NEW (insn, addr);
16211 addr += 4;
16212 }
16213 }
9dda4cc8 16214
9ebbca7d 16215 if (TARGET_DEBUG_STACK)
a4f6c312 16216 debug_rtx_list (get_insns (), 100);
c9d691e9 16217 final (get_insns (), file, FALSE);
9ebbca7d 16218 end_sequence ();
16219 }
16220
16221 rs6000_pic_labelno++;
16222}
f676971a 16223
9ebbca7d 16224/* Emit function epilogue as insns.
9878760c 16225
16226 At present, dwarf2out_frame_debug_expr doesn't understand
16227 register restores, so we don't bother setting RTX_FRAME_RELATED_P
16228 anywhere in the epilogue. Most of the insns below would in any case
16229 need special notes to explain where r11 is in relation to the stack. */
9878760c 16230
9ebbca7d 16231void
a2369ed3 16232rs6000_emit_epilogue (int sibcall)
16233{
16234 rs6000_stack_t *info;
16235 int restoring_FPRs_inline;
16236 int using_load_multiple;
d296e02e 16237 int using_mtcr_multiple;
16238 int use_backchain_to_restore_sp;
16239 int sp_offset = 0;
16240 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
16241 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 16242 enum machine_mode reg_mode = Pmode;
327e5343 16243 int reg_size = TARGET_32BIT ? 4 : 8;
16244 int i;
16245
16246 info = rs6000_stack_info ();
16247
16248 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
16249 {
16250 reg_mode = V2SImode;
16251 reg_size = 8;
16252 }
16253
9ebbca7d 16254 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
16255 && (!TARGET_SPE_ABI
16256 || info->spe_64bit_regs_used == 0)
16257 && info->first_gp_reg_save < 31
16258 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 16259 restoring_FPRs_inline = (sibcall
83720594 16260 || current_function_calls_eh_return
16261 || info->first_fp_reg_save == 64
16262 || FP_SAVE_INLINE (info->first_fp_reg_save));
f676971a 16263 use_backchain_to_restore_sp = (frame_pointer_needed
16264 || current_function_calls_alloca
16265 || info->total_size > 32767);
d296e02e 16266 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
16267 || rs6000_cpu == PROCESSOR_PPC603
16268 || rs6000_cpu == PROCESSOR_PPC750
16269 || optimize_size);
16270
f57fe068 16271 if (WORLD_SAVE_P (info))
16272 {
16273 int i, j;
16274 char rname[30];
16275 const char *alloc_rname;
16276 rtvec p;
16277
16278 /* eh_rest_world_r10 will return to the location saved in the LR
16279 stack slot (which is not likely to be our caller.)
16280 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
16281 rest_world is similar, except any R10 parameter is ignored.
16282 The exception-handling stuff that was here in 2.95 is no
16283 longer necessary. */
16284
16285 p = rtvec_alloc (9
16286 + 1
f676971a 16287 + 32 - info->first_gp_reg_save
16288 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
16289 + 63 + 1 - info->first_fp_reg_save);
d62294f5 16290
16291 strcpy (rname, ((current_function_calls_eh_return) ?
16292 "*eh_rest_world_r10" : "*rest_world"));
16293 alloc_rname = ggc_strdup (rname);
16294
16295 j = 0;
16296 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
16297 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 16298 gen_rtx_REG (Pmode,
1de43f85 16299 LR_REGNO));
d62294f5 16300 RTVEC_ELT (p, j++)
c4ad648e 16301 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 16302 /* The instruction pattern requires a clobber here;
c4ad648e 16303 it is shared with the restVEC helper. */
d62294f5 16304 RTVEC_ELT (p, j++)
c4ad648e 16305 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
16306
16307 {
16308 /* CR register traditionally saved as CR2. */
16309 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16310 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16311 GEN_INT (info->cr_save_offset));
0be76840 16312 rtx mem = gen_frame_mem (reg_mode, addr);
16313
16314 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16315 }
16316
16317 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16318 {
16319 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16320 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16321 GEN_INT (info->gp_save_offset
16322 + reg_size * i));
0be76840 16323 rtx mem = gen_frame_mem (reg_mode, addr);
16324
16325 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16326 }
d62294f5 16327 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
16328 {
16329 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16330 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16331 GEN_INT (info->altivec_save_offset
16332 + 16 * i));
0be76840 16333 rtx mem = gen_frame_mem (V4SImode, addr);
16334
16335 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16336 }
d62294f5 16337 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
16338 {
16339 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
16340 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16341 GEN_INT (info->fp_save_offset
16342 + 8 * i));
0be76840 16343 rtx mem = gen_frame_mem (DFmode, addr);
16344
16345 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16346 }
d62294f5 16347 RTVEC_ELT (p, j++)
c4ad648e 16348 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 16349 RTVEC_ELT (p, j++)
c4ad648e 16350 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 16351 RTVEC_ELT (p, j++)
c4ad648e 16352 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 16353 RTVEC_ELT (p, j++)
c4ad648e 16354 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 16355 RTVEC_ELT (p, j++)
c4ad648e 16356 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
16357 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
16358
16359 return;
16360 }
16361
16362 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
16363 if (info->push_p)
2b2c2fe5 16364 sp_offset = info->total_size;
f676971a 16365
16366 /* Restore AltiVec registers if needed. */
16367 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16368 {
16369 int i;
16370
16371 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16372 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16373 {
16374 rtx addr, areg, mem;
16375
16376 areg = gen_rtx_REG (Pmode, 0);
16377 emit_move_insn
16378 (areg, GEN_INT (info->altivec_save_offset
16379 + sp_offset
16380 + 16 * (i - info->first_altivec_reg_save)));
16381
16382 /* AltiVec addressing mode is [reg+reg]. */
16383 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 16384 mem = gen_frame_mem (V4SImode, addr);
16385
16386 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
16387 }
16388 }
16389
16390 /* If we have a frame pointer, a call to alloca, or a large stack
16391 frame, restore the old stack pointer using the backchain. Otherwise,
16392 we know what size to update it with. */
16393 if (use_backchain_to_restore_sp)
16394 {
16395 /* Under V.4, don't reset the stack pointer until after we're done
16396 loading the saved registers. */
16397 if (DEFAULT_ABI == ABI_V4)
16398 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16399
16400 emit_move_insn (frame_reg_rtx,
16401 gen_rtx_MEM (Pmode, sp_reg_rtx));
45b194f8 16402 sp_offset = 0;
2b2c2fe5 16403 }
16404 else if (info->push_p
16405 && DEFAULT_ABI != ABI_V4
16406 && !current_function_calls_eh_return)
2b2c2fe5 16407 {
16408 emit_insn (TARGET_32BIT
16409 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16410 GEN_INT (info->total_size))
16411 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16412 GEN_INT (info->total_size)));
16413 sp_offset = 0;
16414 }
16415
16416 /* Restore VRSAVE if needed. */
16417 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16418 && info->vrsave_mask != 0)
16419 {
16420 rtx addr, mem, reg;
16421
16422 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16423 GEN_INT (info->vrsave_save_offset + sp_offset));
16424 mem = gen_frame_mem (SImode, addr);
16425 reg = gen_rtx_REG (SImode, 12);
16426 emit_move_insn (reg, mem);
16427
16428 emit_insn (generate_set_vrsave (reg, info, 1));
16429 }
16430
16431 /* Get the old lr if we saved it. */
16432 if (info->lr_save_p)
b6c9286a 16433 {
16434 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
16435 info->lr_save_offset + sp_offset);
ba4828e0 16436
9ebbca7d 16437 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 16438 }
f676971a 16439
16440 /* Get the old cr if we saved it. */
16441 if (info->cr_save_p)
16442 {
16443 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16444 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16445 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 16446
16447 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
16448 }
f676971a 16449
9ebbca7d 16450 /* Set LR here to try to overlap restores below. */
4697a36c 16451 if (info->lr_save_p)
1de43f85 16452 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 16453 gen_rtx_REG (Pmode, 0));
f676971a 16454
16455 /* Load exception handler data registers, if needed. */
16456 if (current_function_calls_eh_return)
16457 {
16458 unsigned int i, regno;
16459
fc4767bb
JJ
16460 if (TARGET_AIX)
16461 {
16462 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16463 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16464 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16465
16466 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
16467 }
16468
83720594
RH
16469 for (i = 0; ; ++i)
16470 {
a3170dc6 16471 rtx mem;
83720594
RH
16472
16473 regno = EH_RETURN_DATA_REGNO (i);
16474 if (regno == INVALID_REGNUM)
16475 break;
16476
a3170dc6
AH
16477 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
16478 info->ehrd_offset + sp_offset
16479 + reg_size * (int) i);
83720594
RH
16480
16481 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
16482 }
16483 }
f676971a 16484
9ebbca7d
GK
16485 /* Restore GPRs. This is done as a PARALLEL if we are using
16486 the load-multiple instructions. */
16487 if (using_load_multiple)
979721f8 16488 {
9ebbca7d
GK
16489 rtvec p;
16490 p = rtvec_alloc (32 - info->first_gp_reg_save);
16491 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
979721f8 16492 {
f676971a
EC
16493 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16494 GEN_INT (info->gp_save_offset
16495 + sp_offset
9ebbca7d 16496 + reg_size * i));
0be76840 16497 rtx mem = gen_frame_mem (reg_mode, addr);
9ebbca7d 16498
f676971a 16499 RTVEC_ELT (p, i) =
9ebbca7d
GK
16500 gen_rtx_SET (VOIDmode,
16501 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16502 mem);
979721f8 16503 }
9ebbca7d 16504 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
979721f8 16505 }
52ff33d0
NF
16506 else if (TARGET_SPE_ABI
16507 && info->spe_64bit_regs_used != 0
16508 && info->first_gp_reg_save != 32)
16509 {
52ff33d0
NF
16510 /* Determine whether we can address all of the registers that need
16511 to be saved with an offset from the stack pointer that fits in
16512 the small const field for SPE memory instructions. */
16513 int spe_regs_addressable_via_sp
16514 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16515 + (32 - info->first_gp_reg_save - 1) * reg_size);
16516 int spe_offset;
16517
16518 if (spe_regs_addressable_via_sp)
45b194f8 16519 spe_offset = info->spe_gp_save_offset + sp_offset;
52ff33d0
NF
16520 else
16521 {
45b194f8 16522 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 16523 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 16524 not clobbering it when we were saving registers in the prologue.
52ff33d0
NF
16525 There's no need to worry here because the static chain is passed
16526 anew to every function. */
45b194f8
AM
16527 if (frame_reg_rtx == sp_reg_rtx)
16528 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16529 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
52ff33d0 16530 GEN_INT (info->spe_gp_save_offset + sp_offset)));
45b194f8
AM
16531 /* Keep the invariant that frame_reg_rtx + sp_offset points
16532 at the top of the stack frame. */
16533 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
16534
16535 spe_offset = 0;
16536 }
16537
16538 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16539 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16540 {
16541 rtx offset, addr, mem;
16542
16543 /* We're doing all this to ensure that the immediate offset
16544 fits into the immediate field of 'evldd'. */
16545 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
16546
16547 offset = GEN_INT (spe_offset + reg_size * i);
45b194f8 16548 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
52ff33d0
NF
16549 mem = gen_rtx_MEM (V2SImode, addr);
16550
16551 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16552 mem);
16553 }
16554 }
9ebbca7d
GK
16555 else
16556 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0 16557 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
9ebbca7d 16558 {
f676971a
EC
16559 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16560 GEN_INT (info->gp_save_offset
16561 + sp_offset
9ebbca7d 16562 + reg_size * i));
0be76840 16563 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 16564
f676971a 16565 emit_move_insn (gen_rtx_REG (reg_mode,
a3170dc6 16566 info->first_gp_reg_save + i), mem);
9ebbca7d 16567 }
9878760c 16568
9ebbca7d
GK
16569 /* Restore fpr's if we need to do it without calling a function. */
16570 if (restoring_FPRs_inline)
16571 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16572 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
16573 && ! call_used_regs[info->first_fp_reg_save+i]))
16574 {
16575 rtx addr, mem;
16576 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
16577 GEN_INT (info->fp_save_offset
16578 + sp_offset
a4f6c312 16579 + 8 * i));
0be76840 16580 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16581
f676971a 16582 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
16583 info->first_fp_reg_save + i),
16584 mem);
16585 }
8d30c4ee 16586
9ebbca7d
GK
16587 /* If we saved cr, restore it here. Just those that were used. */
16588 if (info->cr_save_p)
979721f8 16589 {
9ebbca7d 16590 rtx r12_rtx = gen_rtx_REG (SImode, 12);
e35b9579 16591 int count = 0;
f676971a 16592
d296e02e 16593 if (using_mtcr_multiple)
979721f8 16594 {
9ebbca7d 16595 for (i = 0; i < 8; i++)
6fb5fa3c 16596 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
e35b9579 16597 count++;
37409796 16598 gcc_assert (count);
e35b9579
GK
16599 }
16600
d296e02e 16601 if (using_mtcr_multiple && count > 1)
e35b9579
GK
16602 {
16603 rtvec p;
16604 int ndx;
f676971a 16605
e35b9579 16606 p = rtvec_alloc (count);
9ebbca7d 16607
e35b9579 16608 ndx = 0;
9ebbca7d 16609 for (i = 0; i < 8; i++)
6fb5fa3c 16610 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
9ebbca7d
GK
16611 {
16612 rtvec r = rtvec_alloc (2);
16613 RTVEC_ELT (r, 0) = r12_rtx;
16614 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
e35b9579 16615 RTVEC_ELT (p, ndx) =
f676971a 16616 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
615158e2 16617 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
e35b9579 16618 ndx++;
9ebbca7d
GK
16619 }
16620 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
37409796 16621 gcc_assert (ndx == count);
979721f8
MM
16622 }
16623 else
9ebbca7d 16624 for (i = 0; i < 8; i++)
6fb5fa3c 16625 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
979721f8 16626 {
f676971a 16627 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9ebbca7d
GK
16628 CR0_REGNO+i),
16629 r12_rtx));
979721f8 16630 }
979721f8
MM
16631 }
16632
9ebbca7d 16633 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6
AM
16634 have been done. */
16635 if (frame_reg_rtx != sp_reg_rtx)
16636 {
16637 /* This blockage is needed so that sched doesn't decide to move
16638 the sp change before the register restores. */
16639 rs6000_emit_stack_tie ();
45b194f8
AM
16640 if (sp_offset != 0)
16641 emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
16642 GEN_INT (sp_offset)));
52ff33d0
NF
16643 else
16644 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
022123e6
AM
16645 }
16646 else if (sp_offset != 0)
16647 emit_insn (TARGET_32BIT
16648 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16649 GEN_INT (sp_offset))
16650 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16651 GEN_INT (sp_offset)));
b6c9286a 16652
83720594
RH
16653 if (current_function_calls_eh_return)
16654 {
16655 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 16656 emit_insn (TARGET_32BIT
83720594
RH
16657 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
16658 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
16659 }
16660
9ebbca7d
GK
16661 if (!sibcall)
16662 {
16663 rtvec p;
16664 if (! restoring_FPRs_inline)
16665 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
16666 else
16667 p = rtvec_alloc (2);
b6c9286a 16668
e35b9579 16669 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f676971a
EC
16670 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
16671 gen_rtx_REG (Pmode,
1de43f85 16672 LR_REGNO));
9ebbca7d
GK
16673
16674 /* If we have to restore more than two FP registers, branch to the
16675 restore function. It will return to our caller. */
16676 if (! restoring_FPRs_inline)
16677 {
16678 int i;
16679 char rname[30];
520a57c8 16680 const char *alloc_rname;
979721f8 16681
f676971a 16682 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9ebbca7d 16683 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
a8a05998 16684 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
16685 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
16686 gen_rtx_SYMBOL_REF (Pmode,
16687 alloc_rname));
b6c9286a 16688
9ebbca7d
GK
16689 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16690 {
16691 rtx addr, mem;
16692 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
16693 GEN_INT (info->fp_save_offset + 8*i));
0be76840 16694 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16695
f676971a 16696 RTVEC_ELT (p, i+3) =
9ebbca7d
GK
16697 gen_rtx_SET (VOIDmode,
16698 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
16699 mem);
b6c9286a
MM
16700 }
16701 }
f676971a 16702
9ebbca7d 16703 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 16704 }
9878760c
RK
16705}
16706
16707/* Write function epilogue. */
16708
08c148a8 16709static void
f676971a 16710rs6000_output_function_epilogue (FILE *file,
a2369ed3 16711 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 16712{
9ebbca7d 16713 if (! HAVE_epilogue)
9878760c 16714 {
9ebbca7d
GK
16715 rtx insn = get_last_insn ();
16716 /* If the last insn was a BARRIER, we don't have to write anything except
16717 the trace table. */
16718 if (GET_CODE (insn) == NOTE)
16719 insn = prev_nonnote_insn (insn);
16720 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 16721 {
9ebbca7d
GK
16722 /* This is slightly ugly, but at least we don't have two
16723 copies of the epilogue-emitting code. */
16724 start_sequence ();
16725
16726 /* A NOTE_INSN_DELETED is supposed to be at the start
16727 and end of the "toplevel" insn chain. */
2e040219 16728 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16729 rs6000_emit_epilogue (FALSE);
2e040219 16730 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16731
a3c9585f 16732 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16733 {
16734 rtx insn;
16735 unsigned addr = 0;
16736 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16737 {
16738 INSN_ADDRESSES_NEW (insn, addr);
16739 addr += 4;
16740 }
16741 }
16742
9ebbca7d 16743 if (TARGET_DEBUG_STACK)
a4f6c312 16744 debug_rtx_list (get_insns (), 100);
c9d691e9 16745 final (get_insns (), file, FALSE);
9ebbca7d 16746 end_sequence ();
4697a36c 16747 }
9878760c 16748 }
b4ac57ab 16749
efdba735
SH
16750#if TARGET_MACHO
16751 macho_branch_islands ();
0e5da0be
GK
16752 /* Mach-O doesn't support labels at the end of objects, so if
16753 it looks like we might want one, insert a NOP. */
16754 {
16755 rtx insn = get_last_insn ();
16756 while (insn
16757 && NOTE_P (insn)
a38e7aa5 16758 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 16759 insn = PREV_INSN (insn);
f676971a
EC
16760 if (insn
16761 && (LABEL_P (insn)
0e5da0be 16762 || (NOTE_P (insn)
a38e7aa5 16763 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
16764 fputs ("\tnop\n", file);
16765 }
16766#endif
16767
9b30bae2 16768 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
16769 on its format.
16770
16771 We don't output a traceback table if -finhibit-size-directive was
16772 used. The documentation for -finhibit-size-directive reads
16773 ``don't output a @code{.size} assembler directive, or anything
16774 else that would cause trouble if the function is split in the
16775 middle, and the two halves are placed at locations far apart in
16776 memory.'' The traceback table has this property, since it
16777 includes the offset from the start of the function to the
4d30c363
MM
16778 traceback table itself.
16779
16780 System V.4 Powerpc's (and the embedded ABI derived from it) use a
b6c9286a 16781 different traceback table. */
57ac7be9 16782 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
8097c268 16783 && rs6000_traceback != traceback_none && !current_function_is_thunk)
9b30bae2 16784 {
69c75916 16785 const char *fname = NULL;
3ac88239 16786 const char *language_string = lang_hooks.name;
6041bf2f 16787 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 16788 int i;
57ac7be9 16789 int optional_tbtab;
8097c268 16790 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
16791
16792 if (rs6000_traceback == traceback_full)
16793 optional_tbtab = 1;
16794 else if (rs6000_traceback == traceback_part)
16795 optional_tbtab = 0;
16796 else
16797 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 16798
69c75916
AM
16799 if (optional_tbtab)
16800 {
16801 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
16802 while (*fname == '.') /* V.4 encodes . in the name */
16803 fname++;
16804
16805 /* Need label immediately before tbtab, so we can compute
16806 its offset from the function start. */
16807 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
16808 ASM_OUTPUT_LABEL (file, fname);
16809 }
314fc5a9
ILT
16810
16811 /* The .tbtab pseudo-op can only be used for the first eight
16812 expressions, since it can't handle the possibly variable
16813 length fields that follow. However, if you omit the optional
16814 fields, the assembler outputs zeros for all optional fields
16815 anyway, giving each variable-length field its minimum length
16816 (as defined in sys/debug.h). Thus we cannot use the .tbtab
16817 pseudo-op at all. */
16818
16819 /* An all-zero word flags the start of the tbtab, for debuggers
16820 that have to find it by searching forward from the entry
16821 point or from the current pc. */
19d2d16f 16822 fputs ("\t.long 0\n", file);
314fc5a9
ILT
16823
16824 /* Tbtab format type. Use format type 0. */
19d2d16f 16825 fputs ("\t.byte 0,", file);
314fc5a9 16826
5fc921c1
DE
16827 /* Language type. Unfortunately, there does not seem to be any
16828 official way to discover the language being compiled, so we
16829 use language_string.
16830 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
16831 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
16832 a number, so for now use 9. */
5fc921c1 16833 if (! strcmp (language_string, "GNU C"))
314fc5a9 16834 i = 0;
6de9cd9a
DN
16835 else if (! strcmp (language_string, "GNU F77")
16836 || ! strcmp (language_string, "GNU F95"))
314fc5a9 16837 i = 1;
8b83775b 16838 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 16839 i = 2;
5fc921c1
DE
16840 else if (! strcmp (language_string, "GNU Ada"))
16841 i = 3;
56438901
AM
16842 else if (! strcmp (language_string, "GNU C++")
16843 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 16844 i = 9;
9517ead8
AG
16845 else if (! strcmp (language_string, "GNU Java"))
16846 i = 13;
5fc921c1
DE
16847 else if (! strcmp (language_string, "GNU Objective-C"))
16848 i = 14;
314fc5a9 16849 else
37409796 16850 gcc_unreachable ();
314fc5a9
ILT
16851 fprintf (file, "%d,", i);
16852
16853 /* 8 single bit fields: global linkage (not set for C extern linkage,
16854 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
16855 from start of procedure stored in tbtab, internal function, function
16856 has controlled storage, function has no toc, function uses fp,
16857 function logs/aborts fp operations. */
16858 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
16859 fprintf (file, "%d,",
16860 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
16861
16862 /* 6 bitfields: function is interrupt handler, name present in
16863 proc table, function calls alloca, on condition directives
16864 (controls stack walks, 3 bits), saves condition reg, saves
16865 link reg. */
16866 /* The `function calls alloca' bit seems to be set whenever reg 31 is
16867 set up as a frame pointer, even when there is no alloca call. */
16868 fprintf (file, "%d,",
6041bf2f
DE
16869 ((optional_tbtab << 6)
16870 | ((optional_tbtab & frame_pointer_needed) << 5)
16871 | (info->cr_save_p << 1)
16872 | (info->lr_save_p)));
314fc5a9 16873
6041bf2f 16874 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
16875 (6 bits). */
16876 fprintf (file, "%d,",
4697a36c 16877 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
16878
16879 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
16880 fprintf (file, "%d,", (32 - first_reg_to_save ()));
16881
6041bf2f
DE
16882 if (optional_tbtab)
16883 {
16884 /* Compute the parameter info from the function decl argument
16885 list. */
16886 tree decl;
16887 int next_parm_info_bit = 31;
314fc5a9 16888
6041bf2f
DE
16889 for (decl = DECL_ARGUMENTS (current_function_decl);
16890 decl; decl = TREE_CHAIN (decl))
16891 {
16892 rtx parameter = DECL_INCOMING_RTL (decl);
16893 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 16894
6041bf2f
DE
16895 if (GET_CODE (parameter) == REG)
16896 {
ebb109ad 16897 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
16898 {
16899 int bits;
16900
16901 float_parms++;
16902
37409796
NS
16903 switch (mode)
16904 {
16905 case SFmode:
e41b2a33 16906 case SDmode:
37409796
NS
16907 bits = 0x2;
16908 break;
16909
16910 case DFmode:
7393f7f8 16911 case DDmode:
37409796 16912 case TFmode:
7393f7f8 16913 case TDmode:
37409796
NS
16914 bits = 0x3;
16915 break;
16916
16917 default:
16918 gcc_unreachable ();
16919 }
6041bf2f
DE
16920
16921 /* If only one bit will fit, don't or in this entry. */
16922 if (next_parm_info_bit > 0)
16923 parm_info |= (bits << (next_parm_info_bit - 1));
16924 next_parm_info_bit -= 2;
16925 }
16926 else
16927 {
16928 fixed_parms += ((GET_MODE_SIZE (mode)
16929 + (UNITS_PER_WORD - 1))
16930 / UNITS_PER_WORD);
16931 next_parm_info_bit -= 1;
16932 }
16933 }
16934 }
16935 }
314fc5a9
ILT
16936
16937 /* Number of fixed point parameters. */
16938 /* This is actually the number of words of fixed-point parameters; thus
16939 an 8-byte struct counts as 2, and the maximum value is 8. */
16940 fprintf (file, "%d,", fixed_parms);
16941
16942 /* 2 bitfields: number of floating point parameters (7 bits), parameters
16943 all on stack. */
16944 /* This is actually the number of fp registers that hold parameters;
16945 and thus the maximum value is 13. */
16946 /* Set parameters on stack bit if parameters are not in their original
16947 registers, regardless of whether they are on the stack? Xlc
16948 seems to set the bit when not optimizing. */
16949 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
16950
6041bf2f
DE
16951 if (! optional_tbtab)
16952 return;
16953
314fc5a9
ILT
16954 /* Optional fields follow. Some are variable length. */
16955
16956 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
16957 11 double float. */
16958 /* There is an entry for each parameter in a register, in the order that
16959 they occur in the parameter list. Any intervening arguments on the
16960 stack are ignored. If the list overflows a long (max possible length
16961 34 bits) then completely leave off all elements that don't fit. */
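	 /* Worked example (illustrative): for a function whose register
	    arguments are (int, double), the fields are "0" then "11",
	    left-adjusted from bit 31, so parm_info comes out as 0x60000000.  */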
16962 /* Only emit this long if there was at least one parameter. */
16963 if (fixed_parms || float_parms)
16964 fprintf (file, "\t.long %d\n", parm_info);
16965
16966 /* Offset from start of code to tb table. */
19d2d16f 16967 fputs ("\t.long ", file);
314fc5a9 16968 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
16969 if (TARGET_AIX)
16970 RS6000_OUTPUT_BASENAME (file, fname);
16971 else
16972 assemble_name (file, fname);
16973 putc ('-', file);
16974 rs6000_output_function_entry (file, fname);
19d2d16f 16975 putc ('\n', file);
314fc5a9
ILT
16976
16977 /* Interrupt handler mask. */
16978 /* Omit this long, since we never set the interrupt handler bit
16979 above. */
16980
16981 /* Number of CTL (controlled storage) anchors. */
16982 /* Omit this long, since the has_ctl bit is never set above. */
16983
16984 /* Displacement into stack of each CTL anchor. */
16985 /* Omit this list of longs, because there are no CTL anchors. */
16986
16987 /* Length of function name. */
69c75916
AM
16988 if (*fname == '*')
16989 ++fname;
296b8152 16990 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
16991
16992 /* Function name. */
16993 assemble_string (fname, strlen (fname));
16994
16995 /* Register for alloca automatic storage; this is always reg 31.
16996 Only emit this if the alloca bit was set above. */
16997 if (frame_pointer_needed)
19d2d16f 16998 fputs ("\t.byte 31\n", file);
b1765bde
DE
16999
17000 fputs ("\t.align 2\n", file);
9b30bae2 17001 }
9878760c 17002}
17167fd8 17003\f
a4f6c312
SS
17004/* A C compound statement that outputs the assembler code for a thunk
17005 function, used to implement C++ virtual function calls with
17006 multiple inheritance. The thunk acts as a wrapper around a virtual
17007 function, adjusting the implicit object parameter before handing
17008 control off to the real function.
17009
17010 First, emit code to add the integer DELTA to the location that
17011 contains the incoming first argument. Assume that this argument
17012 contains a pointer, and is the one used to pass the `this' pointer
17013 in C++. This is the incoming argument *before* the function
17014 prologue, e.g. `%o0' on a sparc. The addition must preserve the
17015 values of all other incoming arguments.
17167fd8
MM
17016
17017 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
17018 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
17019 not touch the return address. Hence returning from FUNCTION will
17020 return to whoever called the current `thunk'.
17167fd8 17021
a4f6c312
SS
17022 The effect must be as if FUNCTION had been called directly with the
17023 adjusted first argument. This macro is responsible for emitting
17024 all of the code for a thunk function; output_function_prologue()
17025 and output_function_epilogue() are not invoked.
17167fd8 17026
a4f6c312
SS
17027 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
17028 been extracted from it.) It might possibly be useful on some
17029 targets, but probably not.
17167fd8 17030
a4f6c312
SS
17031 If you do not define this macro, the target-independent code in the
17032 C++ frontend will generate a less efficient heavyweight thunk that
17033 calls FUNCTION instead of jumping to it. The generic approach does
17034 not support varargs. */
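/* In outline, the code emitted below performs (illustrative pseudo-code only):

     this += delta;
     if (vcall_offset)
       this += *(*this + vcall_offset);
     goto *FUNCTION;   */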
17167fd8 17035
3961e8fe 17036static void
f676971a
EC
17037rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
17038 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 17039 tree function)
17167fd8 17040{
5b71a4e7 17041 rtx this, insn, funexp;
17167fd8 17042
5b71a4e7 17043 reload_completed = 1;
fe3ad572 17044 epilogue_completed = 1;
56a7189a 17045
5b71a4e7 17046 /* Mark the end of the (empty) prologue. */
2e040219 17047 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 17048
5b71a4e7
DE
17049 /* Find the "this" pointer. If the function returns a structure,
17050 the structure return pointer is in r3. */
61f71b34 17051 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 17052 this = gen_rtx_REG (Pmode, 4);
56a7189a 17053 else
5b71a4e7 17054 this = gen_rtx_REG (Pmode, 3);
17167fd8 17055
5b71a4e7
DE
17056 /* Apply the constant offset, if required. */
17057 if (delta)
17058 {
17059 rtx delta_rtx = GEN_INT (delta);
17060 emit_insn (TARGET_32BIT
17061 ? gen_addsi3 (this, this, delta_rtx)
17062 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
17063 }
17064
5b71a4e7
DE
17065 /* Apply the offset from the vtable, if required. */
17066 if (vcall_offset)
17167fd8 17067 {
5b71a4e7
DE
17068 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
17069 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 17070
5b71a4e7 17071 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
17072 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
17073 {
17074 emit_insn (TARGET_32BIT
17075 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
17076 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
17077 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
17078 }
17079 else
17080 {
17081 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
17082
17083 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
17084 }
5b71a4e7
DE
17085 emit_insn (TARGET_32BIT
17086 ? gen_addsi3 (this, this, tmp)
17087 : gen_adddi3 (this, this, tmp));
17167fd8
MM
17088 }
17089
5b71a4e7
DE
17090 /* Generate a tail call to the target function. */
17091 if (!TREE_USED (function))
17092 {
17093 assemble_external (function);
17094 TREE_USED (function) = 1;
17095 }
17096 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 17097 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
17098
17099#if TARGET_MACHO
ab82a49f 17100 if (MACHOPIC_INDIRECT)
5b71a4e7 17101 funexp = machopic_indirect_call_target (funexp);
ee890fe2 17102#endif
5b71a4e7
DE
17103
17104 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 17105 generate sibcall RTL explicitly. */
5b71a4e7
DE
17106 insn = emit_call_insn (
17107 gen_rtx_PARALLEL (VOIDmode,
17108 gen_rtvec (4,
17109 gen_rtx_CALL (VOIDmode,
17110 funexp, const0_rtx),
17111 gen_rtx_USE (VOIDmode, const0_rtx),
17112 gen_rtx_USE (VOIDmode,
17113 gen_rtx_REG (SImode,
1de43f85 17114 LR_REGNO)),
5b71a4e7
DE
17115 gen_rtx_RETURN (VOIDmode))));
17116 SIBLING_CALL_P (insn) = 1;
17117 emit_barrier ();
17118
17119 /* Run just enough of rest_of_compilation to get the insns emitted.
17120 There's not really enough bulk here to make other passes such as
17121 instruction scheduling worth while. Note that use_thunk calls
17122 assemble_start_function and assemble_end_function. */
17123 insn = get_insns ();
55e092c4 17124 insn_locators_alloc ();
5b71a4e7
DE
17125 shorten_branches (insn);
17126 final_start_function (insn, file, 1);
c9d691e9 17127 final (insn, file, 1);
5b71a4e7 17128 final_end_function ();
d7087dd2 17129 free_after_compilation (cfun);
5b71a4e7
DE
17130
17131 reload_completed = 0;
fe3ad572 17132 epilogue_completed = 0;
9ebbca7d 17133}
9ebbca7d
GK
17134\f
17135/* A quick summary of the various types of 'constant-pool tables'
17136 under PowerPC:
17137
f676971a 17138 Target Flags Name One table per
9ebbca7d
GK
17139 AIX (none) AIX TOC object file
17140 AIX -mfull-toc AIX TOC object file
17141 AIX -mminimal-toc AIX minimal TOC translation unit
17142 SVR4/EABI (none) SVR4 SDATA object file
17143 SVR4/EABI -fpic SVR4 pic object file
17144 SVR4/EABI -fPIC SVR4 PIC translation unit
17145 SVR4/EABI -mrelocatable EABI TOC function
17146 SVR4/EABI -maix AIX TOC object file
f676971a 17147 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
17148 AIX minimal TOC translation unit
17149
17150 Name Reg. Set by entries contains:
17151 made by addrs? fp? sum?
17152
17153 AIX TOC 2 crt0 as Y option option
17154 AIX minimal TOC 30 prolog gcc Y Y option
17155 SVR4 SDATA 13 crt0 gcc N Y N
17156 SVR4 pic 30 prolog ld Y not yet N
17157 SVR4 PIC 30 prolog gcc Y option option
17158 EABI TOC 30 prolog gcc Y option option
17159
17160*/
17161
9ebbca7d
GK
17162/* Hash functions for the hash table. */
17163
17164static unsigned
a2369ed3 17165rs6000_hash_constant (rtx k)
9ebbca7d 17166{
46b33600
RH
17167 enum rtx_code code = GET_CODE (k);
17168 enum machine_mode mode = GET_MODE (k);
17169 unsigned result = (code << 3) ^ mode;
17170 const char *format;
17171 int flen, fidx;
f676971a 17172
46b33600
RH
17173 format = GET_RTX_FORMAT (code);
17174 flen = strlen (format);
17175 fidx = 0;
9ebbca7d 17176
46b33600
RH
17177 switch (code)
17178 {
17179 case LABEL_REF:
17180 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
17181
17182 case CONST_DOUBLE:
17183 if (mode != VOIDmode)
17184 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
17185 flen = 2;
17186 break;
17187
17188 case CODE_LABEL:
17189 fidx = 3;
17190 break;
17191
17192 default:
17193 break;
17194 }
9ebbca7d
GK
17195
17196 for (; fidx < flen; fidx++)
17197 switch (format[fidx])
17198 {
17199 case 's':
17200 {
17201 unsigned i, len;
17202 const char *str = XSTR (k, fidx);
17203 len = strlen (str);
17204 result = result * 613 + len;
17205 for (i = 0; i < len; i++)
17206 result = result * 613 + (unsigned) str[i];
17167fd8
MM
17207 break;
17208 }
9ebbca7d
GK
17209 case 'u':
17210 case 'e':
17211 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
17212 break;
17213 case 'i':
17214 case 'n':
17215 result = result * 613 + (unsigned) XINT (k, fidx);
17216 break;
17217 case 'w':
17218 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
17219 result = result * 613 + (unsigned) XWINT (k, fidx);
17220 else
17221 {
17222 size_t i;
9390387d 17223 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
17224 result = result * 613 + (unsigned) (XWINT (k, fidx)
17225 >> CHAR_BIT * i);
17226 }
17227 break;
09501938
DE
17228 case '0':
17229 break;
9ebbca7d 17230 default:
37409796 17231 gcc_unreachable ();
9ebbca7d 17232 }
46b33600 17233
9ebbca7d
GK
17234 return result;
17235}
17236
17237static unsigned
a2369ed3 17238toc_hash_function (const void *hash_entry)
9ebbca7d 17239{
f676971a 17240 const struct toc_hash_struct *thc =
a9098fd0
GK
17241 (const struct toc_hash_struct *) hash_entry;
17242 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
17243}
17244
17245/* Compare H1 and H2 for equivalence. */
17246
17247static int
a2369ed3 17248toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
17249{
17250 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
17251 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
17252
a9098fd0
GK
17253 if (((const struct toc_hash_struct *) h1)->key_mode
17254 != ((const struct toc_hash_struct *) h2)->key_mode)
17255 return 0;
17256
5692c7bc 17257 return rtx_equal_p (r1, r2);
9ebbca7d
GK
17258}
17259
28e510bd
MM
17260/* These are the names given by the C++ front-end to vtables, and
17261 vtable-like objects. Ideally, this logic should not be here;
17262 instead, there should be some programmatic way of inquiring as
17263 to whether or not an object is a vtable. */
17264
17265#define VTABLE_NAME_P(NAME) \
9390387d 17266 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
17267 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
17268 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 17269 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 17270 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
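/* (For reference, these are Itanium C++ ABI manglings: "_ZTV" names a
   vtable, "_ZTT" a VTT, "_ZTI" a typeinfo object and "_ZTC" a
   construction vtable; "_vt." is the old g++ v2 vtable prefix.)  */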
28e510bd
MM
17271
17272void
a2369ed3 17273rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
17274{
17275 /* Currently C++ toc references to vtables can be emitted before it
17276 is decided whether the vtable is public or private. If this is
17277 the case, then the linker will eventually complain that there is
f676971a 17278 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
17279 we emit the TOC reference to reference the symbol and not the
17280 section. */
17281 const char *name = XSTR (x, 0);
54ee9799 17282
f676971a 17283 if (VTABLE_NAME_P (name))
54ee9799
DE
17284 {
17285 RS6000_OUTPUT_BASENAME (file, name);
17286 }
17287 else
17288 assemble_name (file, name);
28e510bd
MM
17289}
17290
a4f6c312
SS
17291/* Output a TOC entry. We derive the entry name from what is being
17292 written. */
9878760c
RK
17293
17294void
a2369ed3 17295output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
17296{
17297 char buf[256];
3cce094d 17298 const char *name = buf;
ec940faa 17299 const char *real_name;
9878760c 17300 rtx base = x;
16fdeb48 17301 HOST_WIDE_INT offset = 0;
9878760c 17302
37409796 17303 gcc_assert (!TARGET_NO_TOC);
4697a36c 17304
9ebbca7d
GK
17305 /* When the linker won't eliminate them, don't output duplicate
17306 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
17307 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
17308 CODE_LABELs. */
17309 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
17310 {
17311 struct toc_hash_struct *h;
17312 void * * found;
f676971a 17313
17211ab5 17314 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 17315 time because GGC is not initialized at that point. */
17211ab5 17316 if (toc_hash_table == NULL)
f676971a 17317 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
17318 toc_hash_eq, NULL);
17319
9ebbca7d
GK
17320 h = ggc_alloc (sizeof (*h));
17321 h->key = x;
a9098fd0 17322 h->key_mode = mode;
9ebbca7d 17323 h->labelno = labelno;
f676971a 17324
9ebbca7d
GK
17325 found = htab_find_slot (toc_hash_table, h, 1);
17326 if (*found == NULL)
17327 *found = h;
f676971a 17328 else /* This is indeed a duplicate.
9ebbca7d
GK
17329 Set this label equal to that label. */
17330 {
17331 fputs ("\t.set ", file);
17332 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
17333 fprintf (file, "%d,", labelno);
17334 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 17335 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
17336 found)->labelno));
17337 return;
17338 }
17339 }
17340
17341 /* If we're going to put a double constant in the TOC, make sure it's
17342 aligned properly when strict alignment is on. */
ff1720ed
RK
17343 if (GET_CODE (x) == CONST_DOUBLE
17344 && STRICT_ALIGNMENT
a9098fd0 17345 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
17346 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
17347 ASM_OUTPUT_ALIGN (file, 3);
17348 }
17349
4977bab6 17350 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 17351
37c37a57
RK
17352 /* Handle FP constants specially. Note that if we have a minimal
17353 TOC, things we put here aren't actually in the TOC, so we can allow
17354 FP constants. */
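  /* Illustrative example of the directives written below: for the DFmode
     constant 1.0 on a 64-bit target without -mminimal-toc, the output is
     roughly
	 .tc FD_3ff00000_0[TC],0x3ff0000000000000
     so the entry name encodes the constant's bit pattern.  */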
00b79d54
BE
17355 if (GET_CODE (x) == CONST_DOUBLE &&
17356 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
17357 {
17358 REAL_VALUE_TYPE rv;
17359 long k[4];
17360
17361 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17362 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17363 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
17364 else
17365 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
17366
17367 if (TARGET_64BIT)
17368 {
17369 if (TARGET_MINIMAL_TOC)
17370 fputs (DOUBLE_INT_ASM_OP, file);
17371 else
17372 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17373 k[0] & 0xffffffff, k[1] & 0xffffffff,
17374 k[2] & 0xffffffff, k[3] & 0xffffffff);
17375 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
17376 k[0] & 0xffffffff, k[1] & 0xffffffff,
17377 k[2] & 0xffffffff, k[3] & 0xffffffff);
17378 return;
17379 }
17380 else
17381 {
17382 if (TARGET_MINIMAL_TOC)
17383 fputs ("\t.long ", file);
17384 else
17385 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17386 k[0] & 0xffffffff, k[1] & 0xffffffff,
17387 k[2] & 0xffffffff, k[3] & 0xffffffff);
17388 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
17389 k[0] & 0xffffffff, k[1] & 0xffffffff,
17390 k[2] & 0xffffffff, k[3] & 0xffffffff);
17391 return;
17392 }
17393 }
00b79d54
BE
17394 else if (GET_CODE (x) == CONST_DOUBLE &&
17395 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 17396 {
042259f2
DE
17397 REAL_VALUE_TYPE rv;
17398 long k[2];
0adc764e 17399
042259f2 17400 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17401
17402 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17403 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
17404 else
17405 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 17406
13ded975
DE
17407 if (TARGET_64BIT)
17408 {
17409 if (TARGET_MINIMAL_TOC)
2bfcf297 17410 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17411 else
2f0552b6
AM
17412 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17413 k[0] & 0xffffffff, k[1] & 0xffffffff);
17414 fprintf (file, "0x%lx%08lx\n",
17415 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17416 return;
17417 }
1875cc88 17418 else
13ded975
DE
17419 {
17420 if (TARGET_MINIMAL_TOC)
2bfcf297 17421 fputs ("\t.long ", file);
13ded975 17422 else
2f0552b6
AM
17423 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17424 k[0] & 0xffffffff, k[1] & 0xffffffff);
17425 fprintf (file, "0x%lx,0x%lx\n",
17426 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17427 return;
17428 }
9878760c 17429 }
00b79d54
BE
17430 else if (GET_CODE (x) == CONST_DOUBLE &&
17431 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 17432 {
042259f2
DE
17433 REAL_VALUE_TYPE rv;
17434 long l;
9878760c 17435
042259f2 17436 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17437 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17438 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
17439 else
17440 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 17441
31bfaa0b
DE
17442 if (TARGET_64BIT)
17443 {
17444 if (TARGET_MINIMAL_TOC)
2bfcf297 17445 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 17446 else
2f0552b6
AM
17447 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17448 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
17449 return;
17450 }
042259f2 17451 else
31bfaa0b
DE
17452 {
17453 if (TARGET_MINIMAL_TOC)
2bfcf297 17454 fputs ("\t.long ", file);
31bfaa0b 17455 else
2f0552b6
AM
17456 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17457 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
17458 return;
17459 }
042259f2 17460 }
f176e826 17461 else if (GET_MODE (x) == VOIDmode
a9098fd0 17462 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 17463 {
e2c953b6 17464 unsigned HOST_WIDE_INT low;
042259f2
DE
17465 HOST_WIDE_INT high;
17466
17467 if (GET_CODE (x) == CONST_DOUBLE)
17468 {
17469 low = CONST_DOUBLE_LOW (x);
17470 high = CONST_DOUBLE_HIGH (x);
17471 }
17472 else
17473#if HOST_BITS_PER_WIDE_INT == 32
17474 {
17475 low = INTVAL (x);
0858c623 17476 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
17477 }
17478#else
17479 {
c4ad648e
AM
17480 low = INTVAL (x) & 0xffffffff;
17481 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
17482 }
17483#endif
9878760c 17484
a9098fd0
GK
17485 /* TOC entries are always Pmode-sized, but since this
17486 is a big-endian machine, putting smaller integer
17487 constants in the TOC means we have to pad them.
17488 (This is still a win over putting the constants in
17489 a separate constant pool, because then we'd have
02a4ec28
FS
17490 to have both a TOC entry _and_ the actual constant.)
17491
17492 For a 32-bit target, CONST_INT values are loaded and shifted
17493 entirely within `low' and can be stored in one TOC entry. */
17494
37409796
NS
17495 /* It would be easy to make this work, but it doesn't now. */
17496 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
17497
17498 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
17499 {
17500#if HOST_BITS_PER_WIDE_INT == 32
17501 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
17502 POINTER_SIZE, &low, &high, 0);
17503#else
17504 low |= high << 32;
17505 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
17506 high = (HOST_WIDE_INT) low >> 32;
17507 low &= 0xffffffff;
17508#endif
17509 }
a9098fd0 17510
13ded975
DE
17511 if (TARGET_64BIT)
17512 {
17513 if (TARGET_MINIMAL_TOC)
2bfcf297 17514 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17515 else
2f0552b6
AM
17516 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
17517 (long) high & 0xffffffff, (long) low & 0xffffffff);
17518 fprintf (file, "0x%lx%08lx\n",
17519 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
17520 return;
17521 }
1875cc88 17522 else
13ded975 17523 {
02a4ec28
FS
17524 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
17525 {
17526 if (TARGET_MINIMAL_TOC)
2bfcf297 17527 fputs ("\t.long ", file);
02a4ec28 17528 else
2bfcf297 17529 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
17530 (long) high & 0xffffffff, (long) low & 0xffffffff);
17531 fprintf (file, "0x%lx,0x%lx\n",
17532 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 17533 }
13ded975 17534 else
02a4ec28
FS
17535 {
17536 if (TARGET_MINIMAL_TOC)
2bfcf297 17537 fputs ("\t.long ", file);
02a4ec28 17538 else
2f0552b6
AM
17539 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
17540 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 17541 }
13ded975
DE
17542 return;
17543 }
9878760c
RK
17544 }
17545
17546 if (GET_CODE (x) == CONST)
17547 {
37409796 17548 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 17549
9878760c
RK
17550 base = XEXP (XEXP (x, 0), 0);
17551 offset = INTVAL (XEXP (XEXP (x, 0), 1));
17552 }
f676971a 17553
37409796
NS
17554 switch (GET_CODE (base))
17555 {
17556 case SYMBOL_REF:
17557 name = XSTR (base, 0);
17558 break;
17559
17560 case LABEL_REF:
17561 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
17562 CODE_LABEL_NUMBER (XEXP (base, 0)));
17563 break;
17564
17565 case CODE_LABEL:
17566 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
17567 break;
17568
17569 default:
17570 gcc_unreachable ();
17571 }
9878760c 17572
772c5265 17573 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 17574 if (TARGET_MINIMAL_TOC)
2bfcf297 17575 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
17576 else
17577 {
b6c9286a 17578 fprintf (file, "\t.tc %s", real_name);
9878760c 17579
1875cc88 17580 if (offset < 0)
16fdeb48 17581 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 17582 else if (offset)
16fdeb48 17583 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 17584
19d2d16f 17585 fputs ("[TC],", file);
1875cc88 17586 }
581bc4de
MM
17587
17588 /* Currently C++ toc references to vtables can be emitted before it
17589 is decided whether the vtable is public or private. If this is
17590 the case, then the linker will eventually complain that there is
17591 a TOC reference to an unknown section. Thus, for vtables only,
17592 we emit the TOC reference to reference the symbol and not the
17593 section. */
28e510bd 17594 if (VTABLE_NAME_P (name))
581bc4de 17595 {
54ee9799 17596 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 17597 if (offset < 0)
16fdeb48 17598 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 17599 else if (offset > 0)
16fdeb48 17600 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
17601 }
17602 else
17603 output_addr_const (file, x);
19d2d16f 17604 putc ('\n', file);
9878760c
RK
17605}
17606\f
17607/* Output an assembler pseudo-op to write an ASCII string of N characters
17608 starting at P to FILE.
17609
17610 On the RS/6000, we have to do this using the .byte operation and
17611 write out special characters outside the quoted string.
17612 Also, the assembler is broken; very long strings are truncated,
a4f6c312 17613 so we must artificially break them up early. */
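/* Illustrative example: given the three input bytes "Hi\n", the loop below
   emits
	.byte "Hi"
	.byte 10
   i.e. printable runs are quoted and other bytes are written in decimal.  */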
9878760c
RK
17614
17615void
a2369ed3 17616output_ascii (FILE *file, const char *p, int n)
9878760c
RK
17617{
17618 char c;
17619 int i, count_string;
d330fd93
KG
17620 const char *for_string = "\t.byte \"";
17621 const char *for_decimal = "\t.byte ";
17622 const char *to_close = NULL;
9878760c
RK
17623
17624 count_string = 0;
17625 for (i = 0; i < n; i++)
17626 {
17627 c = *p++;
17628 if (c >= ' ' && c < 0177)
17629 {
17630 if (for_string)
17631 fputs (for_string, file);
17632 putc (c, file);
17633
17634 /* Write two quotes to get one. */
17635 if (c == '"')
17636 {
17637 putc (c, file);
17638 ++count_string;
17639 }
17640
17641 for_string = NULL;
17642 for_decimal = "\"\n\t.byte ";
17643 to_close = "\"\n";
17644 ++count_string;
17645
17646 if (count_string >= 512)
17647 {
17648 fputs (to_close, file);
17649
17650 for_string = "\t.byte \"";
17651 for_decimal = "\t.byte ";
17652 to_close = NULL;
17653 count_string = 0;
17654 }
17655 }
17656 else
17657 {
17658 if (for_decimal)
17659 fputs (for_decimal, file);
17660 fprintf (file, "%d", c);
17661
17662 for_string = "\n\t.byte \"";
17663 for_decimal = ", ";
17664 to_close = "\n";
17665 count_string = 0;
17666 }
17667 }
17668
17669 /* Now close the string if we have written one. Then end the line. */
17670 if (to_close)
9ebbca7d 17671 fputs (to_close, file);
9878760c
RK
17672}
17673\f
17674/* Generate a unique section name for FILENAME for a section type
17675 represented by SECTION_DESC. Output goes into BUF.
17676
17677 SECTION_DESC can be any string, as long as it is different for each
17678 possible section type.
17679
17680 We name the section in the same manner as xlc. The name begins with an
17681 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
17682 names) with the last period replaced by the string SECTION_DESC. If
17683 FILENAME does not contain a period, SECTION_DESC is appended to the end of
17684 the name. */
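/* Illustrative example: for FILENAME "obj/foo.c" and SECTION_DESC "rw_",
   the generated name is "_foorw_"; non-alphanumeric characters before the
   final period are dropped, and the final period itself is replaced by
   SECTION_DESC.  */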
9878760c
RK
17685
17686void
f676971a 17687rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 17688 const char *section_desc)
9878760c 17689{
9ebbca7d 17690 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
17691 char *p;
17692 int len;
9878760c
RK
17693
17694 after_last_slash = filename;
17695 for (q = filename; *q; q++)
11e5fe42
RK
17696 {
17697 if (*q == '/')
17698 after_last_slash = q + 1;
17699 else if (*q == '.')
17700 last_period = q;
17701 }
9878760c 17702
11e5fe42 17703 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 17704 *buf = (char *) xmalloc (len);
9878760c
RK
17705
17706 p = *buf;
17707 *p++ = '_';
17708
17709 for (q = after_last_slash; *q; q++)
17710 {
11e5fe42 17711 if (q == last_period)
c4ad648e 17712 {
9878760c
RK
17713 strcpy (p, section_desc);
17714 p += strlen (section_desc);
e3981aab 17715 break;
c4ad648e 17716 }
9878760c 17717
e9a780ec 17718 else if (ISALNUM (*q))
c4ad648e 17719 *p++ = *q;
9878760c
RK
17720 }
17721
11e5fe42 17722 if (last_period == 0)
9878760c
RK
17723 strcpy (p, section_desc);
17724 else
17725 *p = '\0';
17726}
e165f3f0 17727\f
a4f6c312 17728/* Emit profile function. */
411707f4 17729
411707f4 17730void
a2369ed3 17731output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 17732{
858081ad
AH
17733 /* Non-standard profiling for kernels, which just saves LR then calls
17734 _mcount without worrying about arg saves. The idea is to change
17735 the function prologue as little as possible as it isn't easy to
17736 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
17737 if (TARGET_PROFILE_KERNEL)
17738 return;
17739
8480e480
CC
17740 if (DEFAULT_ABI == ABI_AIX)
17741 {
9739c90c
JJ
17742#ifndef NO_PROFILE_COUNTERS
17743# define NO_PROFILE_COUNTERS 0
17744#endif
f676971a 17745 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
17746 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
17747 else
17748 {
17749 char buf[30];
17750 const char *label_name;
17751 rtx fun;
411707f4 17752
9739c90c
JJ
17753 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
17754 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
17755 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 17756
9739c90c
JJ
17757 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
17758 fun, Pmode);
17759 }
8480e480 17760 }
ee890fe2
SS
17761 else if (DEFAULT_ABI == ABI_DARWIN)
17762 {
d5fa86ba 17763 const char *mcount_name = RS6000_MCOUNT;
1de43f85 17764 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
17765
17766 /* Be conservative and always set this, at least for now. */
17767 current_function_uses_pic_offset_table = 1;
17768
17769#if TARGET_MACHO
17770 /* For PIC code, set up a stub and collect the caller's address
17771 from r0, which is where the prologue puts it. */
11abc112
MM
17772 if (MACHOPIC_INDIRECT
17773 && current_function_uses_pic_offset_table)
17774 caller_addr_regno = 0;
ee890fe2
SS
17775#endif
17776 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
17777 0, VOIDmode, 1,
17778 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
17779 }
411707f4
CC
17780}
17781
a4f6c312 17782/* Write function profiler code. */
e165f3f0
RK
17783
17784void
a2369ed3 17785output_function_profiler (FILE *file, int labelno)
e165f3f0 17786{
3daf36a4 17787 char buf[100];
e165f3f0 17788
38c1f2d7 17789 switch (DEFAULT_ABI)
3daf36a4 17790 {
38c1f2d7 17791 default:
37409796 17792 gcc_unreachable ();
38c1f2d7
MM
17793
17794 case ABI_V4:
09eeeacb
AM
17795 if (!TARGET_32BIT)
17796 {
d4ee4d25 17797 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
17798 return;
17799 }
ffcfcb5f 17800 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 17801 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
17802 if (NO_PROFILE_COUNTERS)
17803 {
17804 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17805 reg_names[0], reg_names[1]);
17806 }
17807 else if (TARGET_SECURE_PLT && flag_pic)
17808 {
17809 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
17810 reg_names[0], reg_names[1]);
17811 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
17812 asm_fprintf (file, "\t{cau|addis} %s,%s,",
17813 reg_names[12], reg_names[12]);
17814 assemble_name (file, buf);
17815 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
17816 assemble_name (file, buf);
17817 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
17818 }
17819 else if (flag_pic == 1)
38c1f2d7 17820 {
dfdfa60f 17821 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
17822 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17823 reg_names[0], reg_names[1]);
17167fd8 17824 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 17825 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 17826 assemble_name (file, buf);
17167fd8 17827 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 17828 }
9ebbca7d 17829 else if (flag_pic > 1)
38c1f2d7 17830 {
71625f3d
AM
17831 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17832 reg_names[0], reg_names[1]);
9ebbca7d 17833 /* Now, we need to get the address of the label. */
71625f3d 17834 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 17835 assemble_name (file, buf);
9ebbca7d
GK
17836 fputs ("-.\n1:", file);
17837 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 17838 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
17839 reg_names[0], reg_names[11]);
17840 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
17841 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 17842 }
38c1f2d7
MM
17843 else
17844 {
17167fd8 17845 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 17846 assemble_name (file, buf);
dfdfa60f 17847 fputs ("@ha\n", file);
71625f3d
AM
17848 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17849 reg_names[0], reg_names[1]);
a260abc9 17850 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 17851 assemble_name (file, buf);
17167fd8 17852 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
17853 }
17854
50d440bc 17855 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
17856 fprintf (file, "\tbl %s%s\n",
17857 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
17858 break;
17859
17860 case ABI_AIX:
ee890fe2 17861 case ABI_DARWIN:
ffcfcb5f
AM
17862 if (!TARGET_PROFILE_KERNEL)
17863 {
a3c9585f 17864 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
17865 }
17866 else
17867 {
37409796 17868 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
17869
17870 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
17871 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
17872
6de9cd9a 17873 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
17874 {
17875 asm_fprintf (file, "\tstd %s,24(%s)\n",
17876 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17877 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17878 asm_fprintf (file, "\tld %s,24(%s)\n",
17879 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17880 }
17881 else
17882 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17883 }
38c1f2d7
MM
17884 break;
17885 }
e165f3f0 17886}
a251ffd0 17887
b54cf83a 17888\f
44cd321e
PS
17889
17890/* The following variable holds the last issued insn. */
17891
17892static rtx last_scheduled_insn;
17893
17894/* The following variable helps to balance the issuing of load and
17895 store instructions. */
17896
17897static int load_store_pendulum;
17898
b54cf83a
DE
17899/* Power4 load update and store update instructions are cracked into a
17900 load or store and an integer insn which are executed in the same cycle.
17901 Branches have their own dispatch slot which does not count against the
17902 GCC issue rate, but it changes the program flow so there are no other
17903 instructions to issue in this cycle. */
17904
17905static int
f676971a
EC
17906rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
17907 int verbose ATTRIBUTE_UNUSED,
a2369ed3 17908 rtx insn, int more)
b54cf83a 17909{
44cd321e 17910 last_scheduled_insn = insn;
b54cf83a
DE
17911 if (GET_CODE (PATTERN (insn)) == USE
17912 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
17913 {
17914 cached_can_issue_more = more;
17915 return cached_can_issue_more;
17916 }
17917
17918 if (insn_terminates_group_p (insn, current_group))
17919 {
17920 cached_can_issue_more = 0;
17921 return cached_can_issue_more;
17922 }
b54cf83a 17923
d296e02e
AP
17924 /* The insn is not recognized, so it has no reservation; just return MORE. */
17925 if (recog_memoized (insn) < 0)
17926 return more;
17927
ec507f2d 17928 if (rs6000_sched_groups)
b54cf83a 17929 {
cbe26ab8 17930 if (is_microcoded_insn (insn))
44cd321e 17931 cached_can_issue_more = 0;
cbe26ab8 17932 else if (is_cracked_insn (insn))
44cd321e
PS
17933 cached_can_issue_more = more > 2 ? more - 2 : 0;
17934 else
17935 cached_can_issue_more = more - 1;
17936
17937 return cached_can_issue_more;
b54cf83a 17938 }
165b263e 17939
d296e02e
AP
17940 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
17941 return 0;
17942
44cd321e
PS
17943 cached_can_issue_more = more - 1;
17944 return cached_can_issue_more;
b54cf83a
DE
17945}
17946
a251ffd0
TG
17947/* Adjust the cost of a scheduling dependency. Return the new cost of
17948 a dependency LINK of INSN on DEP_INSN. COST is the current cost. */
17949
c237e94a 17950static int
0a4f0294 17951rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 17952{
44cd321e 17953 enum attr_type attr_type;
a251ffd0 17954
44cd321e 17955 if (! recog_memoized (insn))
a251ffd0
TG
17956 return 0;
17957
44cd321e 17958 switch (REG_NOTE_KIND (link))
a251ffd0 17959 {
44cd321e
PS
17960 case REG_DEP_TRUE:
17961 {
17962 /* Data dependency; DEP_INSN writes a register that INSN reads
17963 some cycles later. */
17964
17965 /* Separate a load from a narrower, dependent store. */
17966 if (rs6000_sched_groups
17967 && GET_CODE (PATTERN (insn)) == SET
17968 && GET_CODE (PATTERN (dep_insn)) == SET
17969 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
17970 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
17971 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
17972 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
17973 return cost + 14;
17974
17975 attr_type = get_attr_type (insn);
17976
17977 switch (attr_type)
17978 {
17979 case TYPE_JMPREG:
17980 /* Tell the first scheduling pass about the latency between
17981 a mtctr and bctr (and mtlr and br/blr). The first
17982 scheduling pass will not know about this latency since
17983 the mtctr instruction, which has the latency associated
17984 with it, will be generated by reload. */
17985 return TARGET_POWER ? 5 : 4;
17986 case TYPE_BRANCH:
17987 /* Leave some extra cycles between a compare and its
17988 dependent branch, to inhibit expensive mispredicts. */
17989 if ((rs6000_cpu_attr == CPU_PPC603
17990 || rs6000_cpu_attr == CPU_PPC604
17991 || rs6000_cpu_attr == CPU_PPC604E
17992 || rs6000_cpu_attr == CPU_PPC620
17993 || rs6000_cpu_attr == CPU_PPC630
17994 || rs6000_cpu_attr == CPU_PPC750
17995 || rs6000_cpu_attr == CPU_PPC7400
17996 || rs6000_cpu_attr == CPU_PPC7450
17997 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
17998 || rs6000_cpu_attr == CPU_POWER5
17999 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
18000 && recog_memoized (dep_insn)
18001 && (INSN_CODE (dep_insn) >= 0))
982afe02 18002
44cd321e
PS
18003 switch (get_attr_type (dep_insn))
18004 {
18005 case TYPE_CMP:
18006 case TYPE_COMPARE:
18007 case TYPE_DELAYED_COMPARE:
18008 case TYPE_IMUL_COMPARE:
18009 case TYPE_LMUL_COMPARE:
18010 case TYPE_FPCOMPARE:
18011 case TYPE_CR_LOGICAL:
18012 case TYPE_DELAYED_CR:
18013 return cost + 2;
18014 default:
18015 break;
18016 }
18017 break;
18018
18019 case TYPE_STORE:
18020 case TYPE_STORE_U:
18021 case TYPE_STORE_UX:
18022 case TYPE_FPSTORE:
18023 case TYPE_FPSTORE_U:
18024 case TYPE_FPSTORE_UX:
18025 if ((rs6000_cpu == PROCESSOR_POWER6)
18026 && recog_memoized (dep_insn)
18027 && (INSN_CODE (dep_insn) >= 0))
18028 {
18029
18030 if (GET_CODE (PATTERN (insn)) != SET)
18031 /* If this happens, we have to extend this to schedule
18032 optimally. Return default for now. */
18033 return cost;
18034
18035 /* Adjust the cost for the case where the value written
18036 by a fixed point operation is used as the address
18037 gen value on a store. */
18038 switch (get_attr_type (dep_insn))
18039 {
18040 case TYPE_LOAD:
18041 case TYPE_LOAD_U:
18042 case TYPE_LOAD_UX:
18043 case TYPE_CNTLZ:
18044 {
18045 if (! store_data_bypass_p (dep_insn, insn))
18046 return 4;
18047 break;
18048 }
18049 case TYPE_LOAD_EXT:
18050 case TYPE_LOAD_EXT_U:
18051 case TYPE_LOAD_EXT_UX:
18052 case TYPE_VAR_SHIFT_ROTATE:
18053 case TYPE_VAR_DELAYED_COMPARE:
18054 {
18055 if (! store_data_bypass_p (dep_insn, insn))
18056 return 6;
18057 break;
18058 }
18059 case TYPE_INTEGER:
18060 case TYPE_COMPARE:
18061 case TYPE_FAST_COMPARE:
18062 case TYPE_EXTS:
18063 case TYPE_SHIFT:
18064 case TYPE_INSERT_WORD:
18065 case TYPE_INSERT_DWORD:
18066 case TYPE_FPLOAD_U:
18067 case TYPE_FPLOAD_UX:
18068 case TYPE_STORE_U:
18069 case TYPE_STORE_UX:
18070 case TYPE_FPSTORE_U:
18071 case TYPE_FPSTORE_UX:
18072 {
18073 if (! store_data_bypass_p (dep_insn, insn))
18074 return 3;
18075 break;
18076 }
18077 case TYPE_IMUL:
18078 case TYPE_IMUL2:
18079 case TYPE_IMUL3:
18080 case TYPE_LMUL:
18081 case TYPE_IMUL_COMPARE:
18082 case TYPE_LMUL_COMPARE:
18083 {
18084 if (! store_data_bypass_p (dep_insn, insn))
18085 return 17;
18086 break;
18087 }
18088 case TYPE_IDIV:
18089 {
18090 if (! store_data_bypass_p (dep_insn, insn))
18091 return 45;
18092 break;
18093 }
18094 case TYPE_LDIV:
18095 {
18096 if (! store_data_bypass_p (dep_insn, insn))
18097 return 57;
18098 break;
18099 }
18100 default:
18101 break;
18102 }
18103 }
18104 break;
18105
18106 case TYPE_LOAD:
18107 case TYPE_LOAD_U:
18108 case TYPE_LOAD_UX:
18109 case TYPE_LOAD_EXT:
18110 case TYPE_LOAD_EXT_U:
18111 case TYPE_LOAD_EXT_UX:
18112 if ((rs6000_cpu == PROCESSOR_POWER6)
18113 && recog_memoized (dep_insn)
18114 && (INSN_CODE (dep_insn) >= 0))
18115 {
18116
18117 /* Adjust the cost for the case where the value written
18118 by a fixed point instruction is used within the address
18119 gen portion of a subsequent load(u)(x) */
18120 switch (get_attr_type (dep_insn))
18121 {
18122 case TYPE_LOAD:
18123 case TYPE_LOAD_U:
18124 case TYPE_LOAD_UX:
18125 case TYPE_CNTLZ:
18126 {
18127 if (set_to_load_agen (dep_insn, insn))
18128 return 4;
18129 break;
18130 }
18131 case TYPE_LOAD_EXT:
18132 case TYPE_LOAD_EXT_U:
18133 case TYPE_LOAD_EXT_UX:
18134 case TYPE_VAR_SHIFT_ROTATE:
18135 case TYPE_VAR_DELAYED_COMPARE:
18136 {
18137 if (set_to_load_agen (dep_insn, insn))
18138 return 6;
18139 break;
18140 }
18141 case TYPE_INTEGER:
18142 case TYPE_COMPARE:
18143 case TYPE_FAST_COMPARE:
18144 case TYPE_EXTS:
18145 case TYPE_SHIFT:
18146 case TYPE_INSERT_WORD:
18147 case TYPE_INSERT_DWORD:
18148 case TYPE_FPLOAD_U:
18149 case TYPE_FPLOAD_UX:
18150 case TYPE_STORE_U:
18151 case TYPE_STORE_UX:
18152 case TYPE_FPSTORE_U:
18153 case TYPE_FPSTORE_UX:
18154 {
18155 if (set_to_load_agen (dep_insn, insn))
18156 return 3;
18157 break;
18158 }
18159 case TYPE_IMUL:
18160 case TYPE_IMUL2:
18161 case TYPE_IMUL3:
18162 case TYPE_LMUL:
18163 case TYPE_IMUL_COMPARE:
18164 case TYPE_LMUL_COMPARE:
18165 {
18166 if (set_to_load_agen (dep_insn, insn))
18167 return 17;
18168 break;
18169 }
18170 case TYPE_IDIV:
18171 {
18172 if (set_to_load_agen (dep_insn, insn))
18173 return 45;
18174 break;
18175 }
18176 case TYPE_LDIV:
18177 {
18178 if (set_to_load_agen (dep_insn, insn))
18179 return 57;
18180 break;
18181 }
18182 default:
18183 break;
18184 }
18185 }
18186 break;
18187
18188 case TYPE_FPLOAD:
18189 if ((rs6000_cpu == PROCESSOR_POWER6)
18190 && recog_memoized (dep_insn)
18191 && (INSN_CODE (dep_insn) >= 0)
18192 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
18193 return 2;
18194
18195 default:
18196 break;
18197 }
c9dbf840 18198
a251ffd0 18199 /* Fall out to return default cost. */
44cd321e
PS
18200 }
18201 break;
18202
18203 case REG_DEP_OUTPUT:
18204 /* Output dependency; DEP_INSN writes a register that INSN writes some
18205 cycles later. */
18206 if ((rs6000_cpu == PROCESSOR_POWER6)
18207 && recog_memoized (dep_insn)
18208 && (INSN_CODE (dep_insn) >= 0))
18209 {
18210 attr_type = get_attr_type (insn);
18211
18212 switch (attr_type)
18213 {
18214 case TYPE_FP:
18215 if (get_attr_type (dep_insn) == TYPE_FP)
18216 return 1;
18217 break;
18218 case TYPE_FPLOAD:
18219 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
18220 return 2;
18221 break;
18222 default:
18223 break;
18224 }
18225 }
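      /* Fall through: an output dependence not matched above gets the
	 zero cost returned for anti dependences below.  */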
18226 case REG_DEP_ANTI:
18227 /* Anti dependency; DEP_INSN reads a register that INSN writes some
18228 cycles later. */
18229 return 0;
18230
18231 default:
18232 gcc_unreachable ();
a251ffd0
TG
18233 }
18234
18235 return cost;
18236}
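/* Editorial note (a sketch of how the hook above is consumed, not new
   code): rs6000_adjust_cost is registered as the scheduler cost hook
   elsewhere in this file, roughly as

     #undef  TARGET_SCHED_ADJUST_COST
     #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost

   and the haifa scheduler then uses the value it returns in place of
   the default latency for each dependence edge.  */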
b6c9286a 18237
cbe26ab8 18238/* The function returns true if INSN is microcoded.
839a4992 18239 Return false otherwise. */
cbe26ab8
DN
18240
18241static bool
18242is_microcoded_insn (rtx insn)
18243{
18244 if (!insn || !INSN_P (insn)
18245 || GET_CODE (PATTERN (insn)) == USE
18246 || GET_CODE (PATTERN (insn)) == CLOBBER)
18247 return false;
18248
d296e02e
AP
18249 if (rs6000_cpu_attr == CPU_CELL)
18250 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
18251
ec507f2d 18252 if (rs6000_sched_groups)
cbe26ab8
DN
18253 {
18254 enum attr_type type = get_attr_type (insn);
18255 if (type == TYPE_LOAD_EXT_U
18256 || type == TYPE_LOAD_EXT_UX
18257 || type == TYPE_LOAD_UX
18258 || type == TYPE_STORE_UX
18259 || type == TYPE_MFCR)
c4ad648e 18260 return true;
cbe26ab8
DN
18261 }
18262
18263 return false;
18264}
18265
cbe26ab8
DN
18266/* The function returns true if INSN is cracked into 2 instructions
18267 by the processor (and therefore occupies 2 issue slots). */
18268
18269static bool
18270is_cracked_insn (rtx insn)
18271{
18272 if (!insn || !INSN_P (insn)
18273 || GET_CODE (PATTERN (insn)) == USE
18274 || GET_CODE (PATTERN (insn)) == CLOBBER)
18275 return false;
18276
ec507f2d 18277 if (rs6000_sched_groups)
cbe26ab8
DN
18278 {
18279 enum attr_type type = get_attr_type (insn);
18280 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
18281 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
18282 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
18283 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
18284 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
18285 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
18286 || type == TYPE_IDIV || type == TYPE_LDIV
18287 || type == TYPE_INSERT_WORD)
18288 return true;
cbe26ab8
DN
18289 }
18290
18291 return false;
18292}
18293
18294/* The function returns true if INSN can be issued only from
a3c9585f 18295 the branch slot. */
cbe26ab8
DN
18296
18297static bool
18298is_branch_slot_insn (rtx insn)
18299{
18300 if (!insn || !INSN_P (insn)
18301 || GET_CODE (PATTERN (insn)) == USE
18302 || GET_CODE (PATTERN (insn)) == CLOBBER)
18303 return false;
18304
ec507f2d 18305 if (rs6000_sched_groups)
cbe26ab8
DN
18306 {
18307 enum attr_type type = get_attr_type (insn);
18308 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 18309 return true;
cbe26ab8
DN
18310 return false;
18311 }
18312
18313 return false;
18314}
79ae11c4 18315
44cd321e
PS
18316/* The function returns true if OUT_INSN sets a value that is
18317   used in the address generation computation of IN_INSN.  */
18318static bool
18319set_to_load_agen (rtx out_insn, rtx in_insn)
18320{
18321 rtx out_set, in_set;
18322
18323 /* For performance reasons, only handle the simple case where
18324 both loads are a single_set. */
18325 out_set = single_set (out_insn);
18326 if (out_set)
18327 {
18328 in_set = single_set (in_insn);
18329 if (in_set)
18330 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
18331 }
18332
18333 return false;
18334}
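/* Illustrative example for set_to_load_agen (editorial sketch, not taken
   from a real RTL dump): with

     out_insn: (set (reg:DI 9) (plus:DI (reg:DI 3) (const_int 8)))
     in_insn:  (set (reg:DI 10) (mem:DI (plus:DI (reg:DI 9) (const_int 16))))

   the destination of OUT_INSN (reg 9) is mentioned in the source of
   IN_INSN, so the function returns true and the Power6 address-generation
   cost bumps in rs6000_adjust_cost apply.  */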
18335
18336/* The function returns true if the target storage location of
18337   INSN1 is adjacent to the target storage location of INSN2.  */
18339
18340static bool
18341adjacent_mem_locations (rtx insn1, rtx insn2)
18342{
18343
e3a0e200
PB
18344 rtx a = get_store_dest (PATTERN (insn1));
18345 rtx b = get_store_dest (PATTERN (insn2));
18346
44cd321e
PS
18347 if ((GET_CODE (XEXP (a, 0)) == REG
18348 || (GET_CODE (XEXP (a, 0)) == PLUS
18349 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
18350 && (GET_CODE (XEXP (b, 0)) == REG
18351 || (GET_CODE (XEXP (b, 0)) == PLUS
18352 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
18353 {
f98e8938 18354 HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
44cd321e 18355 rtx reg0, reg1;
44cd321e
PS
18356
18357 if (GET_CODE (XEXP (a, 0)) == PLUS)
18358 {
18359 reg0 = XEXP (XEXP (a, 0), 0);
18360 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
18361 }
18362 else
18363 reg0 = XEXP (a, 0);
18364
18365 if (GET_CODE (XEXP (b, 0)) == PLUS)
18366 {
18367 reg1 = XEXP (XEXP (b, 0), 0);
18368 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
18369 }
18370 else
18371 reg1 = XEXP (b, 0);
18372
18373 val_diff = val1 - val0;
18374
18375 return ((REGNO (reg0) == REGNO (reg1))
f98e8938
JJ
18376 && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
18377 || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
44cd321e
PS
18378 }
18379
18380 return false;
18381}
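/* Worked example for adjacent_mem_locations (editorial sketch): if INSN1
   stores 4 bytes at (mem:SI (reg 9)) and INSN2 stores 4 bytes at
   (mem:SI (plus (reg 9) (const_int 4))), then reg0 and reg1 are the same
   register and val_diff is 4, which equals MEM_SIZE of the first store,
   so the two stores are treated as adjacent and become candidates for
   pairing in rs6000_sched_reorder2.  */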
18382
a4f6c312 18383/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
18384 priority INSN_PRIORITY (INSN). Increase the priority to execute the
18385 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
18386 define this macro if you do not need to adjust the scheduling
18387 priorities of insns. */
bef84347 18388
c237e94a 18389static int
a2369ed3 18390rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 18391{
a4f6c312
SS
18392 /* On machines (like the 750) which have asymmetric integer units,
18393 where one integer unit can do multiply and divides and the other
18394 can't, reduce the priority of multiply/divide so it is scheduled
18395 before other integer operations. */
bef84347
VM
18396
18397#if 0
2c3c49de 18398 if (! INSN_P (insn))
bef84347
VM
18399 return priority;
18400
18401 if (GET_CODE (PATTERN (insn)) == USE)
18402 return priority;
18403
18404 switch (rs6000_cpu_attr) {
18405 case CPU_PPC750:
18406 switch (get_attr_type (insn))
18407 {
18408 default:
18409 break;
18410
18411 case TYPE_IMUL:
18412 case TYPE_IDIV:
3cb999d8
DE
18413 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
18414 priority, priority);
bef84347
VM
18415 if (priority >= 0 && priority < 0x01000000)
18416 priority >>= 3;
18417 break;
18418 }
18419 }
18420#endif
18421
44cd321e 18422 if (insn_must_be_first_in_group (insn)
79ae11c4 18423 && reload_completed
f676971a 18424 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
18425 && rs6000_sched_restricted_insns_priority)
18426 {
18427
c4ad648e
AM
18428 /* Prioritize insns that can be dispatched only in the first
18429 dispatch slot. */
79ae11c4 18430 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
18431 /* Attach highest priority to insn. This means that in
18432 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 18433 precede 'priority' (critical path) considerations. */
f676971a 18434 return current_sched_info->sched_max_insns_priority;
79ae11c4 18435 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 18436 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
18437 haifa-sched.c:ready_sort(), only 'priority' (critical path)
18438 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
18439 return (priority + 1);
18440 }
79ae11c4 18441
44cd321e
PS
18442 if (rs6000_cpu == PROCESSOR_POWER6
18443 && ((load_store_pendulum == -2 && is_load_insn (insn))
18444 || (load_store_pendulum == 2 && is_store_insn (insn))))
18445 /* Attach highest priority to insn if the scheduler has just issued two
18446 stores and this instruction is a load, or two loads and this instruction
18447 is a store. Power6 wants loads and stores scheduled alternately
18448 when possible */
18449 return current_sched_info->sched_max_insns_priority;
18450
bef84347
VM
18451 return priority;
18452}
18453
d296e02e
AP
18454/* Return true if the instruction is nonpipelined on the Cell. */
18455static bool
18456is_nonpipeline_insn (rtx insn)
18457{
18458 enum attr_type type;
18459 if (!insn || !INSN_P (insn)
18460 || GET_CODE (PATTERN (insn)) == USE
18461 || GET_CODE (PATTERN (insn)) == CLOBBER)
18462 return false;
18463
18464 type = get_attr_type (insn);
18465 if (type == TYPE_IMUL
18466 || type == TYPE_IMUL2
18467 || type == TYPE_IMUL3
18468 || type == TYPE_LMUL
18469 || type == TYPE_IDIV
18470 || type == TYPE_LDIV
18471 || type == TYPE_SDIV
18472 || type == TYPE_DDIV
18473 || type == TYPE_SSQRT
18474 || type == TYPE_DSQRT
18475 || type == TYPE_MFCR
18476 || type == TYPE_MFCRF
18477 || type == TYPE_MFJMPR)
18478 {
18479 return true;
18480 }
18481 return false;
18482}
18483
18484
a4f6c312
SS
18485/* Return how many instructions the machine can issue per cycle. */
18486
c237e94a 18487static int
863d938c 18488rs6000_issue_rate (void)
b6c9286a 18489{
3317bab1
DE
18490 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
18491 if (!reload_completed)
18492 return 1;
18493
b6c9286a 18494 switch (rs6000_cpu_attr) {
3cb999d8
DE
18495 case CPU_RIOS1: /* ? */
18496 case CPU_RS64A:
18497 case CPU_PPC601: /* ? */
ed947a96 18498 case CPU_PPC7450:
3cb999d8 18499 return 3;
b54cf83a 18500 case CPU_PPC440:
b6c9286a 18501 case CPU_PPC603:
bef84347 18502 case CPU_PPC750:
ed947a96 18503 case CPU_PPC7400:
be12c2b0 18504 case CPU_PPC8540:
d296e02e 18505 case CPU_CELL:
fa41c305
EW
18506 case CPU_PPCE300C2:
18507 case CPU_PPCE300C3:
f676971a 18508 return 2;
3cb999d8 18509 case CPU_RIOS2:
b6c9286a 18510 case CPU_PPC604:
19684119 18511 case CPU_PPC604E:
b6c9286a 18512 case CPU_PPC620:
3cb999d8 18513 case CPU_PPC630:
b6c9286a 18514 return 4;
cbe26ab8 18515 case CPU_POWER4:
ec507f2d 18516 case CPU_POWER5:
44cd321e 18517 case CPU_POWER6:
cbe26ab8 18518 return 5;
b6c9286a
MM
18519 default:
18520 return 1;
18521 }
18522}
18523
be12c2b0
VM
18524/* Return how many instructions to look ahead for better insn
18525 scheduling. */
18526
18527static int
863d938c 18528rs6000_use_sched_lookahead (void)
be12c2b0
VM
18529{
18530 if (rs6000_cpu_attr == CPU_PPC8540)
18531 return 4;
d296e02e
AP
18532 if (rs6000_cpu_attr == CPU_CELL)
18533 return (reload_completed ? 8 : 0);
be12c2b0
VM
18534 return 0;
18535}
18536
d296e02e
AP
18537/* We are choosing insn from the ready queue. Return nonzero if INSN can be chosen. */
18538static int
18539rs6000_use_sched_lookahead_guard (rtx insn)
18540{
18541 if (rs6000_cpu_attr != CPU_CELL)
18542 return 1;
18543
18544 if (insn == NULL_RTX || !INSN_P (insn))
18545 abort ();
982afe02 18546
d296e02e
AP
18547 if (!reload_completed
18548 || is_nonpipeline_insn (insn)
18549 || is_microcoded_insn (insn))
18550 return 0;
18551
18552 return 1;
18553}
18554
569fa502
DN
18555/* Determine if PAT refers to memory. */
18556
18557static bool
18558is_mem_ref (rtx pat)
18559{
18560 const char * fmt;
18561 int i, j;
18562 bool ret = false;
18563
1de59bbd
DE
18564 /* stack_tie does not produce any real memory traffic. */
18565 if (GET_CODE (pat) == UNSPEC
18566 && XINT (pat, 1) == UNSPEC_TIE)
18567 return false;
18568
569fa502
DN
18569 if (GET_CODE (pat) == MEM)
18570 return true;
18571
18572 /* Recursively process the pattern. */
18573 fmt = GET_RTX_FORMAT (GET_CODE (pat));
18574
18575 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
18576 {
18577 if (fmt[i] == 'e')
18578 ret |= is_mem_ref (XEXP (pat, i));
18579 else if (fmt[i] == 'E')
18580 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
18581 ret |= is_mem_ref (XVECEXP (pat, i, j));
18582 }
18583
18584 return ret;
18585}
18586
18587/* Determine if PAT is a PATTERN of a load insn. */
f676971a 18588
569fa502
DN
18589static bool
18590is_load_insn1 (rtx pat)
18591{
18592 if (!pat || pat == NULL_RTX)
18593 return false;
18594
18595 if (GET_CODE (pat) == SET)
18596 return is_mem_ref (SET_SRC (pat));
18597
18598 if (GET_CODE (pat) == PARALLEL)
18599 {
18600 int i;
18601
18602 for (i = 0; i < XVECLEN (pat, 0); i++)
18603 if (is_load_insn1 (XVECEXP (pat, 0, i)))
18604 return true;
18605 }
18606
18607 return false;
18608}
18609
18610/* Determine if INSN loads from memory. */
18611
18612static bool
18613is_load_insn (rtx insn)
18614{
18615 if (!insn || !INSN_P (insn))
18616 return false;
18617
18618 if (GET_CODE (insn) == CALL_INSN)
18619 return false;
18620
18621 return is_load_insn1 (PATTERN (insn));
18622}
18623
18624/* Determine if PAT is a PATTERN of a store insn. */
18625
18626static bool
18627is_store_insn1 (rtx pat)
18628{
18629 if (!pat || pat == NULL_RTX)
18630 return false;
18631
18632 if (GET_CODE (pat) == SET)
18633 return is_mem_ref (SET_DEST (pat));
18634
18635 if (GET_CODE (pat) == PARALLEL)
18636 {
18637 int i;
18638
18639 for (i = 0; i < XVECLEN (pat, 0); i++)
18640 if (is_store_insn1 (XVECEXP (pat, 0, i)))
18641 return true;
18642 }
18643
18644 return false;
18645}
18646
18647/* Determine if INSN stores to memory. */
18648
18649static bool
18650is_store_insn (rtx insn)
18651{
18652 if (!insn || !INSN_P (insn))
18653 return false;
18654
18655 return is_store_insn1 (PATTERN (insn));
18656}
18657
e3a0e200
PB
18658/* Return the dest of a store insn. */
18659
18660static rtx
18661get_store_dest (rtx pat)
18662{
18663 gcc_assert (is_store_insn1 (pat));
18664
18665 if (GET_CODE (pat) == SET)
18666 return SET_DEST (pat);
18667 else if (GET_CODE (pat) == PARALLEL)
18668 {
18669 int i;
18670
18671 for (i = 0; i < XVECLEN (pat, 0); i++)
18672 {
18673 rtx inner_pat = XVECEXP (pat, 0, i);
18674 if (GET_CODE (inner_pat) == SET
18675 && is_mem_ref (SET_DEST (inner_pat)))
18676 return inner_pat;
18677 }
18678 }
18679 /* We shouldn't get here, because we should have either a simple
18680 store insn or a store with update which are covered above. */
18681 gcc_unreachable();
18682}
18683
569fa502
DN
18684/* Returns whether the dependence between INSN and NEXT is considered
18685 costly by the given target. */
18686
18687static bool
b198261f 18688rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 18689{
b198261f
MK
18690 rtx insn;
18691 rtx next;
18692
aabcd309 18693 /* If the flag is not enabled - no dependence is considered costly;
f676971a 18694 allow all dependent insns in the same group.
569fa502
DN
18695 This is the most aggressive option. */
18696 if (rs6000_sched_costly_dep == no_dep_costly)
18697 return false;
18698
f676971a 18699 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
18700 do not allow dependent instructions in the same group.
18701 This is the most conservative option. */
18702 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 18703 return true;
569fa502 18704
b198261f
MK
18705 insn = DEP_PRO (dep);
18706 next = DEP_CON (dep);
18707
f676971a
EC
18708 if (rs6000_sched_costly_dep == store_to_load_dep_costly
18709 && is_load_insn (next)
569fa502
DN
18710 && is_store_insn (insn))
18711 /* Prevent load after store in the same group. */
18712 return true;
18713
18714 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 18715 && is_load_insn (next)
569fa502 18716 && is_store_insn (insn)
e2f6ff94 18717 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
18718 /* Prevent load after store in the same group if it is a true
18719 dependence. */
569fa502 18720 return true;
f676971a
EC
18721
18722 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
18723 and will not be scheduled in the same group. */
18724 if (rs6000_sched_costly_dep <= max_dep_latency
18725 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
18726 return true;
18727
18728 return false;
18729}
18730
f676971a 18731/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
18732 skipping any "non-active" insns - insns that will not actually occupy
18733 an issue slot. Return NULL_RTX if such an insn is not found. */
18734
18735static rtx
18736get_next_active_insn (rtx insn, rtx tail)
18737{
f489aff8 18738 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
18739 return NULL_RTX;
18740
f489aff8 18741 while (1)
cbe26ab8 18742 {
f489aff8
AM
18743 insn = NEXT_INSN (insn);
18744 if (insn == NULL_RTX || insn == tail)
18745 return NULL_RTX;
cbe26ab8 18746
f489aff8
AM
18747 if (CALL_P (insn)
18748 || JUMP_P (insn)
18749 || (NONJUMP_INSN_P (insn)
18750 && GET_CODE (PATTERN (insn)) != USE
18751 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 18752 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
18753 break;
18754 }
18755 return insn;
cbe26ab8
DN
18756}
18757
44cd321e
PS
18758/* We are about to begin issuing insns for this clock cycle. */
18759
18760static int
18761rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
18762 rtx *ready ATTRIBUTE_UNUSED,
18763 int *pn_ready ATTRIBUTE_UNUSED,
18764 int clock_var ATTRIBUTE_UNUSED)
18765{
d296e02e
AP
18766 int n_ready = *pn_ready;
18767
44cd321e
PS
18768 if (sched_verbose)
18769 fprintf (dump, "// rs6000_sched_reorder :\n");
18770
d296e02e
AP
18771  /* Reorder the ready list, if the insn at the head of the ready
18772     list is a nonpipelined insn.  */
18773 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
18774 {
18775 if (is_nonpipeline_insn (ready[n_ready - 1])
18776 && (recog_memoized (ready[n_ready - 2]) > 0))
18777 /* Simply swap first two insns. */
18778 {
18779 rtx tmp = ready[n_ready - 1];
18780 ready[n_ready - 1] = ready[n_ready - 2];
18781 ready[n_ready - 2] = tmp;
18782 }
18783 }
18784
44cd321e
PS
18785 if (rs6000_cpu == PROCESSOR_POWER6)
18786 load_store_pendulum = 0;
18787
18788 return rs6000_issue_rate ();
18789}
18790
18791/* Like rs6000_sched_reorder, but called after issuing each insn. */
18792
18793static int
18794rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
18795 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
18796{
18797 if (sched_verbose)
18798 fprintf (dump, "// rs6000_sched_reorder2 :\n");
18799
18800 /* For Power6, we need to handle some special cases to try and keep the
18801 store queue from overflowing and triggering expensive flushes.
18802
18803 This code monitors how load and store instructions are being issued
18804 and skews the ready list one way or the other to increase the likelihood
18805 that a desired instruction is issued at the proper time.
18806
18807 A couple of things are done. First, we maintain a "load_store_pendulum"
18808 to track the current state of load/store issue.
18809
18810 - If the pendulum is at zero, then no loads or stores have been
18811 issued in the current cycle so we do nothing.
18812
18813 - If the pendulum is 1, then a single load has been issued in this
18814 cycle and we attempt to locate another load in the ready list to
18815 issue with it.
18816
2f8e468b 18817 - If the pendulum is -2, then two stores have already been
44cd321e
PS
18818 issued in this cycle, so we increase the priority of the first load
18819 in the ready list to increase its likelihood of being chosen first
18820 in the next cycle.
18821
18822 - If the pendulum is -1, then a single store has been issued in this
18823 cycle and we attempt to locate another store in the ready list to
18824 issue with it, preferring a store to an adjacent memory location to
18825 facilitate store pairing in the store queue.
18826
18827 - If the pendulum is 2, then two loads have already been
18828 issued in this cycle, so we increase the priority of the first store
18829 in the ready list to increase its likelihood of being chosen first
18830 in the next cycle.
18831
18832 - If the pendulum < -2 or > 2, then do nothing.
18833
18834 Note: This code covers the most common scenarios. There exist non
18835 load/store instructions which make use of the LSU and which
18836 would need to be accounted for to strictly model the behavior
18837 of the machine. Those instructions are currently unaccounted
18838 for to help minimize compile time overhead of this code.
18839 */
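  /* Worked example (editorial sketch, not from a real schedule): starting
     from 0, issuing two loads in a cycle leaves the pendulum at +2, so the
     first store on the ready list gets a priority bump for the next cycle;
     two stores leave it at -2 and the first ready load is bumped instead.
     A single load (+1) or store (-1) makes the code below try to pair a
     second load, or an (ideally adjacent) second store, in the same
     cycle.  */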
18840 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
18841 {
18842 int pos;
18843 int i;
18844 rtx tmp;
18845
18846 if (is_store_insn (last_scheduled_insn))
18847 /* Issuing a store, swing the load_store_pendulum to the left */
18848 load_store_pendulum--;
18849 else if (is_load_insn (last_scheduled_insn))
18850 /* Issuing a load, swing the load_store_pendulum to the right */
18851 load_store_pendulum++;
18852 else
18853 return cached_can_issue_more;
18854
18855 /* If the pendulum is balanced, or there is only one instruction on
18856 the ready list, then all is well, so return. */
18857 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
18858 return cached_can_issue_more;
18859
18860 if (load_store_pendulum == 1)
18861 {
18862 /* A load has been issued in this cycle. Scan the ready list
18863 for another load to issue with it */
18864 pos = *pn_ready-1;
18865
18866 while (pos >= 0)
18867 {
18868 if (is_load_insn (ready[pos]))
18869 {
18870 /* Found a load. Move it to the head of the ready list,
18871 and adjust its priority so that it is more likely to
18872 stay there */
18873 tmp = ready[pos];
18874 for (i=pos; i<*pn_ready-1; i++)
18875 ready[i] = ready[i + 1];
18876 ready[*pn_ready-1] = tmp;
18877 if (INSN_PRIORITY_KNOWN (tmp))
18878 INSN_PRIORITY (tmp)++;
18879 break;
18880 }
18881 pos--;
18882 }
18883 }
18884 else if (load_store_pendulum == -2)
18885 {
18886 /* Two stores have been issued in this cycle. Increase the
18887 priority of the first load in the ready list to favor it for
18888 issuing in the next cycle. */
18889 pos = *pn_ready-1;
18890
18891 while (pos >= 0)
18892 {
18893 if (is_load_insn (ready[pos])
18894 && INSN_PRIORITY_KNOWN (ready[pos]))
18895 {
18896 INSN_PRIORITY (ready[pos])++;
18897
18898 /* Adjust the pendulum to account for the fact that a load
18899 was found and increased in priority. This is to prevent
18900 increasing the priority of multiple loads */
18901 load_store_pendulum--;
18902
18903 break;
18904 }
18905 pos--;
18906 }
18907 }
18908 else if (load_store_pendulum == -1)
18909 {
18910 /* A store has been issued in this cycle. Scan the ready list for
18911 another store to issue with it, preferring a store to an adjacent
18912 memory location */
18913 int first_store_pos = -1;
18914
18915 pos = *pn_ready-1;
18916
18917 while (pos >= 0)
18918 {
18919 if (is_store_insn (ready[pos]))
18920 {
18921 /* Maintain the index of the first store found on the
18922 list */
18923 if (first_store_pos == -1)
18924 first_store_pos = pos;
18925
18926 if (is_store_insn (last_scheduled_insn)
18927 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
18928 {
18929 /* Found an adjacent store. Move it to the head of the
18930 ready list, and adjust its priority so that it is
18931 more likely to stay there */
18932 tmp = ready[pos];
18933 for (i=pos; i<*pn_ready-1; i++)
18934 ready[i] = ready[i + 1];
18935 ready[*pn_ready-1] = tmp;
18936 if (INSN_PRIORITY_KNOWN (tmp))
18937 INSN_PRIORITY (tmp)++;
18938 first_store_pos = -1;
18939
18940 break;
18941 }
18942 }
18943 pos--;
18944 }
18945
18946 if (first_store_pos >= 0)
18947 {
18948 /* An adjacent store wasn't found, but a non-adjacent store was,
18949 so move the non-adjacent store to the front of the ready
18950 list, and adjust its priority so that it is more likely to
18951 stay there. */
18952 tmp = ready[first_store_pos];
18953 for (i=first_store_pos; i<*pn_ready-1; i++)
18954 ready[i] = ready[i + 1];
18955 ready[*pn_ready-1] = tmp;
18956 if (INSN_PRIORITY_KNOWN (tmp))
18957 INSN_PRIORITY (tmp)++;
18958 }
18959 }
18960 else if (load_store_pendulum == 2)
18961 {
18962 /* Two loads have been issued in this cycle. Increase the priority
18963 of the first store in the ready list to favor it for issuing in
18964 the next cycle. */
18965 pos = *pn_ready-1;
18966
18967 while (pos >= 0)
18968 {
18969 if (is_store_insn (ready[pos])
18970 && INSN_PRIORITY_KNOWN (ready[pos]))
18971 {
18972 INSN_PRIORITY (ready[pos])++;
18973
18974 /* Adjust the pendulum to account for the fact that a store
18975 was found and increased in priority. This is to prevent
18976 increasing the priority of multiple stores */
18977 load_store_pendulum++;
18978
18979 break;
18980 }
18981 pos--;
18982 }
18983 }
18984 }
18985
18986 return cached_can_issue_more;
18987}
18988
839a4992 18989/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
18990 of group WHICH_GROUP.
18991
18992 If WHICH_GROUP == current_group, this function will return true if INSN
18993 causes the termination of the current group (i.e., the dispatch group to
18994 which INSN belongs). This means that INSN will be the last insn in the
18995 group it belongs to.
18996
18997 If WHICH_GROUP == previous_group, this function will return true if INSN
18998 causes the termination of the previous group (i.e., the dispatch group that
18999 precedes the group to which INSN belongs). This means that INSN will be
19000 the first insn in the group it belongs to.  */
19001
19002static bool
19003insn_terminates_group_p (rtx insn, enum group_termination which_group)
19004{
44cd321e 19005 bool first, last;
cbe26ab8
DN
19006
19007 if (! insn)
19008 return false;
569fa502 19009
44cd321e
PS
19010 first = insn_must_be_first_in_group (insn);
19011 last = insn_must_be_last_in_group (insn);
cbe26ab8 19012
44cd321e 19013 if (first && last)
cbe26ab8
DN
19014 return true;
19015
19016 if (which_group == current_group)
44cd321e 19017 return last;
cbe26ab8 19018 else if (which_group == previous_group)
44cd321e
PS
19019 return first;
19020
19021 return false;
19022}
19023
19024
19025static bool
19026insn_must_be_first_in_group (rtx insn)
19027{
19028 enum attr_type type;
19029
19030 if (!insn
19031 || insn == NULL_RTX
19032 || GET_CODE (insn) == NOTE
19033 || GET_CODE (PATTERN (insn)) == USE
19034 || GET_CODE (PATTERN (insn)) == CLOBBER)
19035 return false;
19036
19037 switch (rs6000_cpu)
cbe26ab8 19038 {
44cd321e
PS
19039 case PROCESSOR_POWER5:
19040 if (is_cracked_insn (insn))
19041 return true;
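      /* Fall through: POWER5 also obeys the POWER4 rules below.  */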
19042 case PROCESSOR_POWER4:
19043 if (is_microcoded_insn (insn))
19044 return true;
19045
19046 if (!rs6000_sched_groups)
19047 return false;
19048
19049 type = get_attr_type (insn);
19050
19051 switch (type)
19052 {
19053 case TYPE_MFCR:
19054 case TYPE_MFCRF:
19055 case TYPE_MTCR:
19056 case TYPE_DELAYED_CR:
19057 case TYPE_CR_LOGICAL:
19058 case TYPE_MTJMPR:
19059 case TYPE_MFJMPR:
19060 case TYPE_IDIV:
19061 case TYPE_LDIV:
19062 case TYPE_LOAD_L:
19063 case TYPE_STORE_C:
19064 case TYPE_ISYNC:
19065 case TYPE_SYNC:
19066 return true;
19067 default:
19068 break;
19069 }
19070 break;
19071 case PROCESSOR_POWER6:
19072 type = get_attr_type (insn);
19073
19074 switch (type)
19075 {
19076 case TYPE_INSERT_DWORD:
19077 case TYPE_EXTS:
19078 case TYPE_CNTLZ:
19079 case TYPE_SHIFT:
19080 case TYPE_VAR_SHIFT_ROTATE:
19081 case TYPE_TRAP:
19082 case TYPE_IMUL:
19083 case TYPE_IMUL2:
19084 case TYPE_IMUL3:
19085 case TYPE_LMUL:
19086 case TYPE_IDIV:
19087 case TYPE_INSERT_WORD:
19088 case TYPE_DELAYED_COMPARE:
19089 case TYPE_IMUL_COMPARE:
19090 case TYPE_LMUL_COMPARE:
19091 case TYPE_FPCOMPARE:
19092 case TYPE_MFCR:
19093 case TYPE_MTCR:
19094 case TYPE_MFJMPR:
19095 case TYPE_MTJMPR:
19096 case TYPE_ISYNC:
19097 case TYPE_SYNC:
19098 case TYPE_LOAD_L:
19099 case TYPE_STORE_C:
19100 case TYPE_LOAD_U:
19101 case TYPE_LOAD_UX:
19102 case TYPE_LOAD_EXT_UX:
19103 case TYPE_STORE_U:
19104 case TYPE_STORE_UX:
19105 case TYPE_FPLOAD_U:
19106 case TYPE_FPLOAD_UX:
19107 case TYPE_FPSTORE_U:
19108 case TYPE_FPSTORE_UX:
19109 return true;
19110 default:
19111 break;
19112 }
19113 break;
19114 default:
19115 break;
19116 }
19117
19118 return false;
19119}
19120
19121static bool
19122insn_must_be_last_in_group (rtx insn)
19123{
19124 enum attr_type type;
19125
19126 if (!insn
19127 || insn == NULL_RTX
19128 || GET_CODE (insn) == NOTE
19129 || GET_CODE (PATTERN (insn)) == USE
19130 || GET_CODE (PATTERN (insn)) == CLOBBER)
19131 return false;
19132
19133 switch (rs6000_cpu) {
19134 case PROCESSOR_POWER4:
19135 case PROCESSOR_POWER5:
19136 if (is_microcoded_insn (insn))
19137 return true;
19138
19139 if (is_branch_slot_insn (insn))
19140 return true;
19141
19142 break;
19143 case PROCESSOR_POWER6:
19144 type = get_attr_type (insn);
19145
19146 switch (type)
19147 {
19148 case TYPE_EXTS:
19149 case TYPE_CNTLZ:
19150 case TYPE_SHIFT:
19151 case TYPE_VAR_SHIFT_ROTATE:
19152 case TYPE_TRAP:
19153 case TYPE_IMUL:
19154 case TYPE_IMUL2:
19155 case TYPE_IMUL3:
19156 case TYPE_LMUL:
19157 case TYPE_IDIV:
19158 case TYPE_DELAYED_COMPARE:
19159 case TYPE_IMUL_COMPARE:
19160 case TYPE_LMUL_COMPARE:
19161 case TYPE_FPCOMPARE:
19162 case TYPE_MFCR:
19163 case TYPE_MTCR:
19164 case TYPE_MFJMPR:
19165 case TYPE_MTJMPR:
19166 case TYPE_ISYNC:
19167 case TYPE_SYNC:
19168 case TYPE_LOAD_L:
19169 case TYPE_STORE_C:
19170 return true;
19171 default:
19172 break;
cbe26ab8 19173 }
44cd321e
PS
19174 break;
19175 default:
19176 break;
19177 }
cbe26ab8
DN
19178
19179 return false;
19180}
19181
839a4992 19182/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
19183 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
19184
19185static bool
19186is_costly_group (rtx *group_insns, rtx next_insn)
19187{
19188 int i;
cbe26ab8
DN
19189 int issue_rate = rs6000_issue_rate ();
19190
19191 for (i = 0; i < issue_rate; i++)
19192 {
e2f6ff94
MK
19193 sd_iterator_def sd_it;
19194 dep_t dep;
cbe26ab8 19195 rtx insn = group_insns[i];
b198261f 19196
cbe26ab8 19197 if (!insn)
c4ad648e 19198 continue;
b198261f 19199
e2f6ff94 19200 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 19201 {
b198261f
MK
19202 rtx next = DEP_CON (dep);
19203
19204 if (next == next_insn
19205 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
19206 return true;
c4ad648e 19207 }
cbe26ab8
DN
19208 }
19209
19210 return false;
19211}
19212
f676971a 19213/* Utility of the function redefine_groups.
cbe26ab8
DN
19214 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
19215 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
19216 to keep it "far" (in a separate group) from GROUP_INSNS, following
19217 one of the following schemes, depending on the value of the flag
19218   -minsert-sched-nops=X:
19219 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 19220 in order to force NEXT_INSN into a separate group.
f676971a
EC
19221 (2) X < sched_finish_regroup_exact: insert exactly X nops.
19222 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
19223 insertion (has a group just ended, how many vacant issue slots remain in the
19224 last group, and how many dispatch groups were encountered so far). */
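/* Editorial example of scheme (2) above (a sketch): with
   -minsert-sched-nops=2 and a costly dependence between the current group
   and NEXT_INSN, exactly two nops are emitted before NEXT_INSN; the slot
   bookkeeping below then works out whether those nops happened to close
   the current dispatch group.  */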
19225
f676971a 19226static int
c4ad648e
AM
19227force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
19228 rtx next_insn, bool *group_end, int can_issue_more,
19229 int *group_count)
cbe26ab8
DN
19230{
19231 rtx nop;
19232 bool force;
19233 int issue_rate = rs6000_issue_rate ();
19234 bool end = *group_end;
19235 int i;
19236
19237 if (next_insn == NULL_RTX)
19238 return can_issue_more;
19239
19240 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
19241 return can_issue_more;
19242
19243 force = is_costly_group (group_insns, next_insn);
19244 if (!force)
19245 return can_issue_more;
19246
19247 if (sched_verbose > 6)
19248 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 19249 *group_count ,can_issue_more);
cbe26ab8
DN
19250
19251 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
19252 {
19253 if (*group_end)
c4ad648e 19254 can_issue_more = 0;
cbe26ab8
DN
19255
19256 /* Since only a branch can be issued in the last issue_slot, it is
19257 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
19258 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
19259 in this case the last nop will start a new group and the branch
19260 will be forced to the new group. */
cbe26ab8 19261 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 19262 can_issue_more--;
cbe26ab8
DN
19263
19264 while (can_issue_more > 0)
c4ad648e 19265 {
9390387d 19266 nop = gen_nop ();
c4ad648e
AM
19267 emit_insn_before (nop, next_insn);
19268 can_issue_more--;
19269 }
cbe26ab8
DN
19270
19271 *group_end = true;
19272 return 0;
f676971a 19273 }
cbe26ab8
DN
19274
19275 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
19276 {
19277 int n_nops = rs6000_sched_insert_nops;
19278
f676971a 19279 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 19280 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 19281 if (can_issue_more == 0)
c4ad648e 19282 can_issue_more = issue_rate;
cbe26ab8
DN
19283 can_issue_more--;
19284 if (can_issue_more == 0)
c4ad648e
AM
19285 {
19286 can_issue_more = issue_rate - 1;
19287 (*group_count)++;
19288 end = true;
19289 for (i = 0; i < issue_rate; i++)
19290 {
19291 group_insns[i] = 0;
19292 }
19293 }
cbe26ab8
DN
19294
19295 while (n_nops > 0)
c4ad648e
AM
19296 {
19297 nop = gen_nop ();
19298 emit_insn_before (nop, next_insn);
19299 if (can_issue_more == issue_rate - 1) /* new group begins */
19300 end = false;
19301 can_issue_more--;
19302 if (can_issue_more == 0)
19303 {
19304 can_issue_more = issue_rate - 1;
19305 (*group_count)++;
19306 end = true;
19307 for (i = 0; i < issue_rate; i++)
19308 {
19309 group_insns[i] = 0;
19310 }
19311 }
19312 n_nops--;
19313 }
cbe26ab8
DN
19314
19315 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 19316 can_issue_more++;
cbe26ab8 19317
c4ad648e
AM
19318 /* Is next_insn going to start a new group? */
19319 *group_end
19320 = (end
cbe26ab8
DN
19321 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19322 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19323 || (can_issue_more < issue_rate &&
c4ad648e 19324 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19325 if (*group_end && end)
c4ad648e 19326 (*group_count)--;
cbe26ab8
DN
19327
19328 if (sched_verbose > 6)
c4ad648e
AM
19329 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
19330 *group_count, can_issue_more);
f676971a
EC
19331 return can_issue_more;
19332 }
cbe26ab8
DN
19333
19334 return can_issue_more;
19335}
19336
19337/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 19338 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
19339 form in practice. It tries to achieve this synchronization by forcing the
19340 estimated processor grouping on the compiler (as opposed to the function
19341 'pad_groups' which tries to force the scheduler's grouping on the processor).
19342
19343 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
19344 examines the (estimated) dispatch groups that will be formed by the processor
19345 dispatcher. It marks these group boundaries to reflect the estimated
19346 processor grouping, overriding the grouping that the scheduler had marked.
19347 Depending on the value of the flag '-minsert-sched-nops' this function can
19348 force certain insns into separate groups or force a certain distance between
19349 them by inserting nops, for example, if there exists a "costly dependence"
19350 between the insns.
19351
19352 The function estimates the group boundaries that the processor will form as
0fa2e4df 19353 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
19354 each insn. A subsequent insn will start a new group if one of the following
19355 4 cases applies:
19356 - no more vacant issue slots remain in the current dispatch group.
19357 - only the last issue slot, which is the branch slot, is vacant, but the next
19358 insn is not a branch.
19359 - only the last 2 or less issue slots, including the branch slot, are vacant,
19360 which means that a cracked insn (which occupies two issue slots) can't be
19361 issued in this group.
f676971a 19362 - less than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
19363 start a new group. */
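/* Concrete instance of the rules above (editorial sketch): with an issue
   rate of 5 where the fifth slot is the branch-only slot, a group holding
   four non-branch insns is closed unless the next insn is a branch; and
   once only the branch slot plus one other slot remain vacant, a cracked
   insn (which occupies two issue slots) forces a new group.  */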
19364
19365static int
19366redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19367{
19368 rtx insn, next_insn;
19369 int issue_rate;
19370 int can_issue_more;
19371 int slot, i;
19372 bool group_end;
19373 int group_count = 0;
19374 rtx *group_insns;
19375
19376 /* Initialize. */
19377 issue_rate = rs6000_issue_rate ();
19378 group_insns = alloca (issue_rate * sizeof (rtx));
f676971a 19379 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
19380 {
19381 group_insns[i] = 0;
19382 }
19383 can_issue_more = issue_rate;
19384 slot = 0;
19385 insn = get_next_active_insn (prev_head_insn, tail);
19386 group_end = false;
19387
19388 while (insn != NULL_RTX)
19389 {
19390 slot = (issue_rate - can_issue_more);
19391 group_insns[slot] = insn;
19392 can_issue_more =
c4ad648e 19393 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 19394 if (insn_terminates_group_p (insn, current_group))
c4ad648e 19395 can_issue_more = 0;
cbe26ab8
DN
19396
19397 next_insn = get_next_active_insn (insn, tail);
19398 if (next_insn == NULL_RTX)
c4ad648e 19399 return group_count + 1;
cbe26ab8 19400
c4ad648e
AM
19401 /* Is next_insn going to start a new group? */
19402 group_end
19403 = (can_issue_more == 0
19404 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19405 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19406 || (can_issue_more < issue_rate &&
19407 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19408
f676971a 19409 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
19410 next_insn, &group_end, can_issue_more,
19411 &group_count);
cbe26ab8
DN
19412
19413 if (group_end)
c4ad648e
AM
19414 {
19415 group_count++;
19416 can_issue_more = 0;
19417 for (i = 0; i < issue_rate; i++)
19418 {
19419 group_insns[i] = 0;
19420 }
19421 }
cbe26ab8
DN
19422
19423 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 19424 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 19425 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 19426 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
19427
19428 insn = next_insn;
19429 if (can_issue_more == 0)
c4ad648e
AM
19430 can_issue_more = issue_rate;
19431 } /* while */
cbe26ab8
DN
19432
19433 return group_count;
19434}
19435
19436/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
19437 dispatch group boundaries that the scheduler had marked. Pad with nops
19438 any dispatch groups which have vacant issue slots, in order to force the
19439 scheduler's grouping on the processor dispatcher. The function
19440 returns the number of dispatch groups found. */
19441
19442static int
19443pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19444{
19445 rtx insn, next_insn;
19446 rtx nop;
19447 int issue_rate;
19448 int can_issue_more;
19449 int group_end;
19450 int group_count = 0;
19451
19452 /* Initialize issue_rate. */
19453 issue_rate = rs6000_issue_rate ();
19454 can_issue_more = issue_rate;
19455
19456 insn = get_next_active_insn (prev_head_insn, tail);
19457 next_insn = get_next_active_insn (insn, tail);
19458
19459 while (insn != NULL_RTX)
19460 {
19461 can_issue_more =
19462 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
19463
19464 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
19465
19466 if (next_insn == NULL_RTX)
c4ad648e 19467 break;
cbe26ab8
DN
19468
19469 if (group_end)
c4ad648e
AM
19470 {
19471 /* If the scheduler had marked group termination at this location
19472 (between insn and next_insn), and neither insn nor next_insn will
19473 force group termination, pad the group with nops to force group
19474 termination. */
19475 if (can_issue_more
19476 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
19477 && !insn_terminates_group_p (insn, current_group)
19478 && !insn_terminates_group_p (next_insn, previous_group))
19479 {
9390387d 19480 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
19481 can_issue_more--;
19482
19483 while (can_issue_more)
19484 {
19485 nop = gen_nop ();
19486 emit_insn_before (nop, next_insn);
19487 can_issue_more--;
19488 }
19489 }
19490
19491 can_issue_more = issue_rate;
19492 group_count++;
19493 }
cbe26ab8
DN
19494
19495 insn = next_insn;
19496 next_insn = get_next_active_insn (insn, tail);
19497 }
19498
19499 return group_count;
19500}
19501
44cd321e
PS
19502/* We're beginning a new block. Initialize data structures as necessary. */
19503
19504static void
19505rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
19506 int sched_verbose ATTRIBUTE_UNUSED,
19507 int max_ready ATTRIBUTE_UNUSED)
982afe02 19508{
44cd321e
PS
19509 last_scheduled_insn = NULL_RTX;
19510 load_store_pendulum = 0;
19511}
19512
cbe26ab8
DN
19513/* The following function is called at the end of scheduling BB.
19514   After reload, it inserts nops to enforce insn group bundling.  */
19515
19516static void
38f391a5 19517rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
19518{
19519 int n_groups;
19520
19521 if (sched_verbose)
19522 fprintf (dump, "=== Finishing schedule.\n");
19523
ec507f2d 19524 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
19525 {
19526 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 19527 return;
cbe26ab8
DN
19528
19529 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
19530 n_groups = pad_groups (dump, sched_verbose,
19531 current_sched_info->prev_head,
19532 current_sched_info->next_tail);
cbe26ab8 19533 else
c4ad648e
AM
19534 n_groups = redefine_groups (dump, sched_verbose,
19535 current_sched_info->prev_head,
19536 current_sched_info->next_tail);
cbe26ab8
DN
19537
19538 if (sched_verbose >= 6)
19539 {
19540 fprintf (dump, "ngroups = %d\n", n_groups);
19541 print_rtl (dump, current_sched_info->prev_head);
19542 fprintf (dump, "Done finish_sched\n");
19543 }
19544 }
19545}
b6c9286a 19546\f
b6c9286a
MM
19547/* Length in units of the trampoline for entering a nested function. */
19548
19549int
863d938c 19550rs6000_trampoline_size (void)
b6c9286a
MM
19551{
19552 int ret = 0;
19553
19554 switch (DEFAULT_ABI)
19555 {
19556 default:
37409796 19557 gcc_unreachable ();
b6c9286a
MM
19558
19559 case ABI_AIX:
8f802bfb 19560 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
19561 break;
19562
4dabc42d 19563 case ABI_DARWIN:
b6c9286a 19564 case ABI_V4:
03a7e1a5 19565 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 19566 break;
b6c9286a
MM
19567 }
19568
19569 return ret;
19570}
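/* Editorial note on the figures above (a sketch, derived only from the
   code in this file): the AIX trampoline is the 3-word function
   descriptor built by rs6000_initialize_trampoline, i.e. 3 * 4 = 12
   bytes for 32-bit and 3 * 8 = 24 bytes for 64-bit, while the V.4/eabi
   and Darwin trampolines are filled in at run time by __trampoline_setup
   and use the larger 40- and 48-byte layouts.  */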
19571
19572/* Emit RTL insns to initialize the variable parts of a trampoline.
19573 FNADDR is an RTX for the address of the function's pure code.
19574 CXT is an RTX for the static chain value for the function. */
19575
19576void
a2369ed3 19577rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 19578{
8bd04c56 19579 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 19580 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
19581
19582 switch (DEFAULT_ABI)
19583 {
19584 default:
37409796 19585 gcc_unreachable ();
b6c9286a 19586
8bd04c56 19587/* Macros to shorten the code expansions below. */
9613eaff 19588#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 19589#define MEM_PLUS(addr,offset) \
9613eaff 19590 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 19591
b6c9286a
MM
19592 /* Under AIX, just build the 3-word function descriptor.  */
19593 case ABI_AIX:
8bd04c56 19594 {
9613eaff
SH
19595 rtx fn_reg = gen_reg_rtx (Pmode);
19596 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 19597 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 19598 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
19599 emit_move_insn (MEM_DEREF (addr), fn_reg);
19600 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
19601 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
19602 }
b6c9286a
MM
19603 break;
19604
4dabc42d
TC
19605 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
19606 case ABI_DARWIN:
b6c9286a 19607 case ABI_V4:
9613eaff 19608 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 19609 FALSE, VOIDmode, 4,
9613eaff 19610 addr, Pmode,
eaf1bcf1 19611 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
19612 fnaddr, Pmode,
19613 ctx_reg, Pmode);
b6c9286a 19614 break;
b6c9286a
MM
19615 }
19616
19617 return;
19618}
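/* Layout written by the ABI_AIX case above (editorial sketch, offsets in
   multiples of 'regsize', i.e. 4 or 8 bytes):

     addr + 0 * regsize : entry address loaded from the callee descriptor
     addr + 1 * regsize : TOC pointer loaded from the callee descriptor
     addr + 2 * regsize : static chain value CXT  */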
7509c759
MM
19619
19620\f
91d231cb 19621/* Table of valid machine attributes. */
a4f6c312 19622
91d231cb 19623const struct attribute_spec rs6000_attribute_table[] =
7509c759 19624{
91d231cb 19625 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 19626 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
19627 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
19628 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
19629 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
19630 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
19631#ifdef SUBTARGET_ATTRIBUTE_TABLE
19632 SUBTARGET_ATTRIBUTE_TABLE,
19633#endif
a5c76ee6 19634 { NULL, 0, 0, false, false, false, NULL }
91d231cb 19635};
7509c759 19636
8bb418a3
ZL
19637/* Handle the "altivec" attribute. The attribute may have
19638 arguments as follows:
f676971a 19639
8bb418a3
ZL
19640 __attribute__((altivec(vector__)))
19641 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
19642 __attribute__((altivec(bool__))) (always followed by 'unsigned')
19643
19644 and may appear more than once (e.g., 'vector bool char') in a
19645 given declaration. */
19646
19647static tree
f90ac3f0
UP
19648rs6000_handle_altivec_attribute (tree *node,
19649 tree name ATTRIBUTE_UNUSED,
19650 tree args,
8bb418a3
ZL
19651 int flags ATTRIBUTE_UNUSED,
19652 bool *no_add_attrs)
19653{
19654 tree type = *node, result = NULL_TREE;
19655 enum machine_mode mode;
19656 int unsigned_p;
19657 char altivec_type
19658 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
19659 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
19660 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 19661 : '?');
8bb418a3
ZL
19662
19663 while (POINTER_TYPE_P (type)
19664 || TREE_CODE (type) == FUNCTION_TYPE
19665 || TREE_CODE (type) == METHOD_TYPE
19666 || TREE_CODE (type) == ARRAY_TYPE)
19667 type = TREE_TYPE (type);
19668
19669 mode = TYPE_MODE (type);
19670
f90ac3f0
UP
19671 /* Check for invalid AltiVec type qualifiers. */
19672 if (type == long_unsigned_type_node || type == long_integer_type_node)
19673 {
19674 if (TARGET_64BIT)
19675 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
19676 else if (rs6000_warn_altivec_long)
d4ee4d25 19677 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
19678 }
19679 else if (type == long_long_unsigned_type_node
19680 || type == long_long_integer_type_node)
19681 error ("use of %<long long%> in AltiVec types is invalid");
19682 else if (type == double_type_node)
19683 error ("use of %<double%> in AltiVec types is invalid");
19684 else if (type == long_double_type_node)
19685 error ("use of %<long double%> in AltiVec types is invalid");
19686 else if (type == boolean_type_node)
19687 error ("use of boolean types in AltiVec types is invalid");
19688 else if (TREE_CODE (type) == COMPLEX_TYPE)
19689 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
19690 else if (DECIMAL_FLOAT_MODE_P (mode))
19691 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
19692
19693 switch (altivec_type)
19694 {
19695 case 'v':
8df83eae 19696 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
19697 switch (mode)
19698 {
c4ad648e
AM
19699 case SImode:
19700 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
19701 break;
19702 case HImode:
19703 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
19704 break;
19705 case QImode:
19706 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
19707 break;
19708 case SFmode: result = V4SF_type_node; break;
19709 /* If the user says 'vector int bool', we may be handed the 'bool'
19710 attribute _before_ the 'vector' attribute, and so select the
19711 proper type in the 'b' case below. */
19712 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
19713 result = type;
19714 default: break;
8bb418a3
ZL
19715 }
19716 break;
19717 case 'b':
19718 switch (mode)
19719 {
c4ad648e
AM
19720 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
19721 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
19722 case QImode: case V16QImode: result = bool_V16QI_type_node;
19723 default: break;
8bb418a3
ZL
19724 }
19725 break;
19726 case 'p':
19727 switch (mode)
19728 {
c4ad648e
AM
19729 case V8HImode: result = pixel_V8HI_type_node;
19730 default: break;
8bb418a3
ZL
19731 }
19732 default: break;
19733 }
19734
7958a2a6
FJ
19735 if (result && result != type && TYPE_READONLY (type))
19736 result = build_qualified_type (result, TYPE_QUAL_CONST);
19737
8bb418a3
ZL
19738 *no_add_attrs = true; /* No need to hang on to the attribute. */
19739
f90ac3f0 19740 if (result)
8bb418a3
ZL
19741 *node = reconstruct_complex_type (*node, result);
19742
19743 return NULL_TREE;
19744}
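/* For orientation (hypothetical user-level spellings, not code from this
   file): the AltiVec keywords are lowered onto this attribute roughly as

     vector unsigned int  ->  __attribute__((altivec(vector__))) unsigned int
     vector bool short    ->  __attribute__((altivec(bool__))) unsigned short
     vector pixel         ->  __attribute__((altivec(pixel__))) unsigned short

   which the handler above then maps onto the corresponding vector type
   nodes (e.g. unsigned_V4SI_type_node for the first case).  */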
19745
f18eca82
ZL
19746/* AltiVec defines four built-in scalar types that serve as vector
19747 elements; we must teach the compiler how to mangle them. */
19748
19749static const char *
3101faab 19750rs6000_mangle_type (const_tree type)
f18eca82 19751{
608063c3
JB
19752 type = TYPE_MAIN_VARIANT (type);
19753
19754 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
19755 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
19756 return NULL;
19757
f18eca82
ZL
19758 if (type == bool_char_type_node) return "U6__boolc";
19759 if (type == bool_short_type_node) return "U6__bools";
19760 if (type == pixel_type_node) return "u7__pixel";
19761 if (type == bool_int_type_node) return "U6__booli";
19762
337bde91
DE
19763 /* Mangle IBM extended float long double as `g' (__float128) on
19764 powerpc*-linux where long-double-64 previously was the default. */
19765 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
19766 && TARGET_ELF
19767 && TARGET_LONG_DOUBLE_128
19768 && !TARGET_IEEEQUAD)
19769 return "g";
19770
f18eca82
ZL
19771 /* For all other types, use normal C++ mangling. */
19772 return NULL;
19773}
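/* Mangling example (editorial sketch): a C++ declaration such as
   void f (vector bool int) picks up the "U6__booli" string returned
   above for its parameter type, so it mangles roughly as
   _Z1fU6__booli instead of exposing the underlying V4SI mode.  */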
19774
a5c76ee6
ZW
19775/* Handle a "longcall" or "shortcall" attribute; arguments as in
19776 struct attribute_spec.handler. */
a4f6c312 19777
91d231cb 19778static tree
f676971a
EC
19779rs6000_handle_longcall_attribute (tree *node, tree name,
19780 tree args ATTRIBUTE_UNUSED,
19781 int flags ATTRIBUTE_UNUSED,
a2369ed3 19782 bool *no_add_attrs)
91d231cb
JM
19783{
19784 if (TREE_CODE (*node) != FUNCTION_TYPE
19785 && TREE_CODE (*node) != FIELD_DECL
19786 && TREE_CODE (*node) != TYPE_DECL)
19787 {
5c498b10 19788 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
19789 IDENTIFIER_POINTER (name));
19790 *no_add_attrs = true;
19791 }
6a4cee5f 19792
91d231cb 19793 return NULL_TREE;
7509c759
MM
19794}
19795
a5c76ee6
ZW
19796/* Set longcall attributes on all functions declared when
19797 rs6000_default_long_calls is true. */
19798static void
a2369ed3 19799rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
19800{
19801 if (rs6000_default_long_calls
19802 && (TREE_CODE (type) == FUNCTION_TYPE
19803 || TREE_CODE (type) == METHOD_TYPE))
19804 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
19805 NULL_TREE,
19806 TYPE_ATTRIBUTES (type));
16d6f994
EC
19807
19808#if TARGET_MACHO
19809 darwin_set_default_type_attributes (type);
19810#endif
a5c76ee6
ZW
19811}
19812
3cb999d8
DE
19813/* Return a reference suitable for calling a function with the
19814 longcall attribute. */
a4f6c312 19815
9390387d 19816rtx
a2369ed3 19817rs6000_longcall_ref (rtx call_ref)
6a4cee5f 19818{
d330fd93 19819 const char *call_name;
6a4cee5f
MM
19820 tree node;
19821
19822 if (GET_CODE (call_ref) != SYMBOL_REF)
19823 return call_ref;
19824
19825 /* System V adds '.' to the internal name, so skip them. */
19826 call_name = XSTR (call_ref, 0);
19827 if (*call_name == '.')
19828 {
19829 while (*call_name == '.')
19830 call_name++;
19831
19832 node = get_identifier (call_name);
39403d82 19833 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
19834 }
19835
19836 return force_reg (Pmode, call_ref);
19837}
7509c759 19838\f
77ccdfed
EC
19839#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
19840#define TARGET_USE_MS_BITFIELD_LAYOUT 0
19841#endif
19842
19843/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
19844 struct attribute_spec.handler. */
19845static tree
19846rs6000_handle_struct_attribute (tree *node, tree name,
19847 tree args ATTRIBUTE_UNUSED,
19848 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
19849{
19850 tree *type = NULL;
19851 if (DECL_P (*node))
19852 {
19853 if (TREE_CODE (*node) == TYPE_DECL)
19854 type = &TREE_TYPE (*node);
19855 }
19856 else
19857 type = node;
19858
19859 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
19860 || TREE_CODE (*type) == UNION_TYPE)))
19861 {
19862 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
19863 *no_add_attrs = true;
19864 }
19865
19866 else if ((is_attribute_p ("ms_struct", name)
19867 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
19868 || ((is_attribute_p ("gcc_struct", name)
19869 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
19870 {
19871 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
19872 IDENTIFIER_POINTER (name));
19873 *no_add_attrs = true;
19874 }
19875
19876 return NULL_TREE;
19877}
19878
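/* Illustrative usage (added commentary, not part of the original file):
   the attributes accepted above select the record layout convention for
   a single type, e.g.

     struct __attribute__ ((ms_struct)) ms_layout { char c; int f : 7; };
     struct __attribute__ ((gcc_struct)) gcc_layout { char c; int f : 7; };

   Naming both attributes on one type triggers the "incompatible
   attribute ignored" warning issued above.  */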
19879static bool
3101faab 19880rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
19881{
19882 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
19883 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
19884 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
19885}
19886\f
b64a1b53
RH
19887#ifdef USING_ELFOS_H
19888
d6b5193b 19889/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 19890
d6b5193b
RS
19891static void
19892rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
19893{
19894 if (DEFAULT_ABI == ABI_AIX
19895 && TARGET_MINIMAL_TOC
19896 && !TARGET_RELOCATABLE)
19897 {
19898 if (!toc_initialized)
19899 {
19900 toc_initialized = 1;
19901 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19902 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
19903 fprintf (asm_out_file, "\t.tc ");
19904 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
19905 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19906 fprintf (asm_out_file, "\n");
19907
19908 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19909 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19910 fprintf (asm_out_file, " = .+32768\n");
19911 }
19912 else
19913 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19914 }
19915 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
19916 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19917 else
19918 {
19919 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19920 if (!toc_initialized)
19921 {
19922 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19923 fprintf (asm_out_file, " = .+32768\n");
19924 toc_initialized = 1;
19925 }
19926 }
19927}
19928
19929/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 19930
b64a1b53 19931static void
d6b5193b
RS
19932rs6000_elf_asm_init_sections (void)
19933{
19934 toc_section
19935 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
19936
19937 sdata2_section
19938 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
19939 SDATA2_SECTION_ASM_OP);
19940}
19941
19942/* Implement TARGET_SELECT_RTX_SECTION. */
19943
19944static section *
f676971a 19945rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 19946 unsigned HOST_WIDE_INT align)
7509c759 19947{
a9098fd0 19948 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 19949 return toc_section;
7509c759 19950 else
d6b5193b 19951 return default_elf_select_rtx_section (mode, x, align);
7509c759 19952}
d9407988 19953\f
d1908feb
JJ
19954/* For a SYMBOL_REF, set generic flags and then perform some
19955 target-specific processing.
19956
d1908feb
JJ
19957 When the AIX ABI is requested on a non-AIX system, replace the
19958 function name with the real name (with a leading .) rather than the
19959 function descriptor name. This saves a lot of overriding code to
19960 read the prefixes. */
d9407988 19961
fb49053f 19962static void
a2369ed3 19963rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 19964{
d1908feb 19965 default_encode_section_info (decl, rtl, first);
b2003250 19966
d1908feb
JJ
19967 if (first
19968 && TREE_CODE (decl) == FUNCTION_DECL
19969 && !TARGET_AIX
19970 && DEFAULT_ABI == ABI_AIX)
d9407988 19971 {
c6a2438a 19972 rtx sym_ref = XEXP (rtl, 0);
d1908feb
JJ
19973 size_t len = strlen (XSTR (sym_ref, 0));
19974 char *str = alloca (len + 2);
19975 str[0] = '.';
19976 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
19977 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 19978 }
d9407988
MM
19979}
19980
21d9bb3f
PB
19981static inline bool
19982compare_section_name (const char *section, const char *template)
19983{
19984 int len;
19985
19986 len = strlen (template);
19987 return (strncmp (section, template, len) == 0
19988 && (section[len] == 0 || section[len] == '.'));
19989}
19990
c1b7d95a 19991bool
3101faab 19992rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
19993{
19994 if (rs6000_sdata == SDATA_NONE)
19995 return false;
19996
7482ad25
AF
19997 /* We want to merge strings, so we never consider them small data. */
19998 if (TREE_CODE (decl) == STRING_CST)
19999 return false;
20000
20001 /* Functions are never in the small data area. */
20002 if (TREE_CODE (decl) == FUNCTION_DECL)
20003 return false;
20004
0e5dbd9b
DE
20005 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
20006 {
20007 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
20008 if (compare_section_name (section, ".sdata")
20009 || compare_section_name (section, ".sdata2")
20010 || compare_section_name (section, ".gnu.linkonce.s")
20011 || compare_section_name (section, ".sbss")
20012 || compare_section_name (section, ".sbss2")
20013 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
20014 || strcmp (section, ".PPC.EMB.sdata0") == 0
20015 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
20016 return true;
20017 }
20018 else
20019 {
20020 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
20021
20022 if (size > 0
307b599c 20023 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
20024 /* If it's not public, and we're not going to reference it there,
20025 there's no need to put it in the small data section. */
0e5dbd9b
DE
20026 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
20027 return true;
20028 }
20029
20030 return false;
20031}
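/* Illustrative examples (added commentary, not part of the original
   file): with -msdata and -G 8, the test above classifies both of these
   as small data -- the first because its size is no larger than
   g_switch_value, the second because of its explicit section name:

     int counter;
     int table[64] __attribute__ ((section (".sdata")));
*/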
20032
b91da81f 20033#endif /* USING_ELFOS_H */
aacd3885
RS
20034\f
20035/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 20036
aacd3885 20037static bool
3101faab 20038rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
20039{
20040 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
20041}
a6c2a102 20042\f
000034eb 20043/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
20044 ADDR can be effectively incremented by incrementing REG.
20045
20046 r0 is special and we must not select it as an address
20047 register by this routine since our caller will try to
20048 increment the returned register via an "la" instruction. */
000034eb 20049
9390387d 20050rtx
a2369ed3 20051find_addr_reg (rtx addr)
000034eb
DE
20052{
20053 while (GET_CODE (addr) == PLUS)
20054 {
02441cd6
JL
20055 if (GET_CODE (XEXP (addr, 0)) == REG
20056 && REGNO (XEXP (addr, 0)) != 0)
000034eb 20057 addr = XEXP (addr, 0);
02441cd6
JL
20058 else if (GET_CODE (XEXP (addr, 1)) == REG
20059 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
20060 addr = XEXP (addr, 1);
20061 else if (CONSTANT_P (XEXP (addr, 0)))
20062 addr = XEXP (addr, 1);
20063 else if (CONSTANT_P (XEXP (addr, 1)))
20064 addr = XEXP (addr, 0);
20065 else
37409796 20066 gcc_unreachable ();
000034eb 20067 }
37409796
NS
20068 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
20069 return addr;
000034eb
DE
20070}
20071
a6c2a102 20072void
a2369ed3 20073rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
20074{
20075 fatal_insn ("bad address", op);
20076}
c8023011 20077
ee890fe2
SS
20078#if TARGET_MACHO
20079
efdba735 20080static tree branch_island_list = 0;
ee890fe2 20081
efdba735
SH
20082/* Remember to generate a branch island for far calls to the given
20083 function. */
ee890fe2 20084
f676971a 20085static void
c4ad648e
AM
20086add_compiler_branch_island (tree label_name, tree function_name,
20087 int line_number)
ee890fe2 20088{
efdba735 20089 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 20090 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
20091 TREE_CHAIN (branch_island) = branch_island_list;
20092 branch_island_list = branch_island;
ee890fe2
SS
20093}
20094
efdba735
SH
20095#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
20096#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
20097#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
20098 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 20099
efdba735
SH
20100/* Generate far-jump branch islands for everything on the
20101 branch_island_list. Invoked immediately after the last instruction
20102 of the epilogue has been emitted; the branch-islands must be
20103 appended to, and contiguous with, the function body. Mach-O stubs
20104 are generated in machopic_output_stub(). */
ee890fe2 20105
efdba735
SH
20106static void
20107macho_branch_islands (void)
20108{
20109 char tmp_buf[512];
20110 tree branch_island;
20111
20112 for (branch_island = branch_island_list;
20113 branch_island;
20114 branch_island = TREE_CHAIN (branch_island))
20115 {
20116 const char *label =
20117 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
20118 const char *name =
11abc112 20119 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
20120 char name_buf[512];
20121 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
20122 if (name[0] == '*' || name[0] == '&')
20123 strcpy (name_buf, name+1);
20124 else
20125 {
20126 name_buf[0] = '_';
20127 strcpy (name_buf+1, name);
20128 }
20129 strcpy (tmp_buf, "\n");
20130 strcat (tmp_buf, label);
ee890fe2 20131#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 20132 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 20133 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 20134#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
20135 if (flag_pic)
20136 {
20137 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
20138 strcat (tmp_buf, label);
20139 strcat (tmp_buf, "_pic\n");
20140 strcat (tmp_buf, label);
20141 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 20142
efdba735
SH
20143 strcat (tmp_buf, "\taddis r11,r11,ha16(");
20144 strcat (tmp_buf, name_buf);
20145 strcat (tmp_buf, " - ");
20146 strcat (tmp_buf, label);
20147 strcat (tmp_buf, "_pic)\n");
f676971a 20148
efdba735 20149 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 20150
efdba735
SH
20151 strcat (tmp_buf, "\taddi r12,r11,lo16(");
20152 strcat (tmp_buf, name_buf);
20153 strcat (tmp_buf, " - ");
20154 strcat (tmp_buf, label);
20155 strcat (tmp_buf, "_pic)\n");
f676971a 20156
efdba735
SH
20157 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
20158 }
20159 else
20160 {
20161 strcat (tmp_buf, ":\nlis r12,hi16(");
20162 strcat (tmp_buf, name_buf);
20163 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
20164 strcat (tmp_buf, name_buf);
20165 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
20166 }
20167 output_asm_insn (tmp_buf, 0);
ee890fe2 20168#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 20169 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 20170 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 20171#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 20172 }
ee890fe2 20173
efdba735 20174 branch_island_list = 0;
ee890fe2
SS
20175}
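/* For reference (reconstructed from the strcat sequence above, not
   emitted verbatim anywhere): a non-PIC island labelled L42 that reaches
   "foo" looks roughly like

	L42:
		lis r12,hi16(_foo)
		ori r12,r12,lo16(_foo)
		mtctr r12
		bctr

   The full 32-bit address is materialized in r12 and the branch goes
   through the count register, so the island itself has unlimited
   reach.  */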
20176
20177/* NO_PREVIOUS_DEF checks whether the function name is already on
20178   branch_island_list.  */
20179
efdba735 20180static int
a2369ed3 20181no_previous_def (tree function_name)
ee890fe2 20182{
efdba735
SH
20183 tree branch_island;
20184 for (branch_island = branch_island_list;
20185 branch_island;
20186 branch_island = TREE_CHAIN (branch_island))
20187 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
20188 return 0;
20189 return 1;
20190}
20191
20192/* GET_PREV_LABEL gets the label name from the previous definition of
20193 the function. */
20194
efdba735 20195static tree
a2369ed3 20196get_prev_label (tree function_name)
ee890fe2 20197{
efdba735
SH
20198 tree branch_island;
20199 for (branch_island = branch_island_list;
20200 branch_island;
20201 branch_island = TREE_CHAIN (branch_island))
20202 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
20203 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
20204 return 0;
20205}
20206
75b1b789
MS
20207#ifndef DARWIN_LINKER_GENERATES_ISLANDS
20208#define DARWIN_LINKER_GENERATES_ISLANDS 0
20209#endif
20210
20211/* KEXTs still need branch islands. */
20212#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
20213 || flag_mkernel || flag_apple_kext)
20214
ee890fe2 20215/* INSN is either a function call or a millicode call. It may have an
f676971a 20216 unconditional jump in its delay slot.
ee890fe2
SS
20217
20218 CALL_DEST is the routine we are calling. */
20219
20220char *
c4ad648e
AM
20221output_call (rtx insn, rtx *operands, int dest_operand_number,
20222 int cookie_operand_number)
ee890fe2
SS
20223{
20224 static char buf[256];
75b1b789
MS
20225 if (DARWIN_GENERATE_ISLANDS
20226 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 20227 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
20228 {
20229 tree labelname;
efdba735 20230 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 20231
ee890fe2
SS
20232 if (no_previous_def (funname))
20233 {
ee890fe2
SS
20234 rtx label_rtx = gen_label_rtx ();
20235 char *label_buf, temp_buf[256];
20236 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
20237 CODE_LABEL_NUMBER (label_rtx));
20238 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
20239 labelname = get_identifier (label_buf);
a38e7aa5 20240 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
20241 }
20242 else
20243 labelname = get_prev_label (funname);
20244
efdba735
SH
20245 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
20246 instruction will reach 'foo', otherwise link as 'bl L42'".
20247 "L42" should be a 'branch island', that will do a far jump to
20248 'foo'. Branch islands are generated in
20249 macho_branch_islands(). */
ee890fe2 20250 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 20251 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
20252 }
20253 else
efdba735
SH
20254 sprintf (buf, "bl %%z%d", dest_operand_number);
20255 return buf;
ee890fe2
SS
20256}
20257
ee890fe2
SS
20258/* Generate PIC and indirect symbol stubs. */
20259
20260void
a2369ed3 20261machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
20262{
20263 unsigned int length;
a4f6c312
SS
20264 char *symbol_name, *lazy_ptr_name;
20265 char *local_label_0;
ee890fe2
SS
20266 static int label = 0;
20267
df56a27f 20268 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 20269 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 20270
ee890fe2 20271
ee890fe2
SS
20272 length = strlen (symb);
20273 symbol_name = alloca (length + 32);
20274 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
20275
20276 lazy_ptr_name = alloca (length + 32);
20277 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
20278
ee890fe2 20279 if (flag_pic == 2)
56c779bc 20280 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 20281 else
56c779bc 20282 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
20283
20284 if (flag_pic == 2)
20285 {
d974312d
DJ
20286 fprintf (file, "\t.align 5\n");
20287
20288 fprintf (file, "%s:\n", stub);
20289 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20290
876455fa 20291 label++;
89da1f32 20292 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
876455fa 20293 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 20294
ee890fe2
SS
20295 fprintf (file, "\tmflr r0\n");
20296 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
20297 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
20298 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
20299 lazy_ptr_name, local_label_0);
20300 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
20301 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
20302 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
20303 lazy_ptr_name, local_label_0);
20304 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
20305 fprintf (file, "\tbctr\n");
20306 }
20307 else
d974312d
DJ
20308 {
20309 fprintf (file, "\t.align 4\n");
20310
20311 fprintf (file, "%s:\n", stub);
20312 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20313
20314 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
20315 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
20316 (TARGET_64BIT ? "ldu" : "lwzu"),
20317 lazy_ptr_name);
d974312d
DJ
20318 fprintf (file, "\tmtctr r12\n");
20319 fprintf (file, "\tbctr\n");
20320 }
f676971a 20321
56c779bc 20322 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
20323 fprintf (file, "%s:\n", lazy_ptr_name);
20324 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
20325 fprintf (file, "%sdyld_stub_binding_helper\n",
20326 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
20327}
20328
20329/* Legitimize PIC addresses. If the address is already
20330 position-independent, we return ORIG. Newly generated
20331 position-independent addresses go into a reg.  This is REG if
20332 nonzero, otherwise we allocate register(s) as necessary.  */
20333
4fbbe694 20334#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
20335
20336rtx
f676971a 20337rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 20338 rtx reg)
ee890fe2
SS
20339{
20340 rtx base, offset;
20341
20342 if (reg == NULL && ! reload_in_progress && ! reload_completed)
20343 reg = gen_reg_rtx (Pmode);
20344
20345 if (GET_CODE (orig) == CONST)
20346 {
37409796
NS
20347 rtx reg_temp;
20348
ee890fe2
SS
20349 if (GET_CODE (XEXP (orig, 0)) == PLUS
20350 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
20351 return orig;
20352
37409796 20353 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 20354
37409796
NS
20355 /* Use a different reg for the intermediate value, as
20356 it will be marked UNCHANGING. */
b3a13419 20357 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
20358 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
20359 Pmode, reg_temp);
20360 offset =
20361 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
20362 Pmode, reg);
bb8df8a6 20363
ee890fe2
SS
20364 if (GET_CODE (offset) == CONST_INT)
20365 {
20366 if (SMALL_INT (offset))
ed8908e7 20367 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
20368 else if (! reload_in_progress && ! reload_completed)
20369 offset = force_reg (Pmode, offset);
20370 else
c859cda6
DJ
20371 {
20372 rtx mem = force_const_mem (Pmode, orig);
20373 return machopic_legitimize_pic_address (mem, Pmode, reg);
20374 }
ee890fe2 20375 }
f1c25d3b 20376 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
20377 }
20378
20379 /* Fall back on generic machopic code. */
20380 return machopic_legitimize_pic_address (orig, mode, reg);
20381}
20382
c4e18b1c
GK
20383/* Output a .machine directive for the Darwin assembler, and call
20384 the generic start_file routine. */
20385
20386static void
20387rs6000_darwin_file_start (void)
20388{
94ff898d 20389 static const struct
c4e18b1c
GK
20390 {
20391 const char *arg;
20392 const char *name;
20393 int if_set;
20394 } mapping[] = {
55dbfb48 20395 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
20396 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
20397 { "power4", "ppc970", 0 },
20398 { "G5", "ppc970", 0 },
20399 { "7450", "ppc7450", 0 },
20400 { "7400", "ppc7400", MASK_ALTIVEC },
20401 { "G4", "ppc7400", 0 },
20402 { "750", "ppc750", 0 },
20403 { "740", "ppc750", 0 },
20404 { "G3", "ppc750", 0 },
20405 { "604e", "ppc604e", 0 },
20406 { "604", "ppc604", 0 },
20407 { "603e", "ppc603", 0 },
20408 { "603", "ppc603", 0 },
20409 { "601", "ppc601", 0 },
20410 { NULL, "ppc", 0 } };
20411 const char *cpu_id = "";
20412 size_t i;
94ff898d 20413
9390387d 20414 rs6000_file_start ();
192d0f89 20415 darwin_file_start ();
c4e18b1c
GK
20416
20417 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
20418 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
20419 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
20420 && rs6000_select[i].string[0] != '\0')
20421 cpu_id = rs6000_select[i].string;
20422
20423 /* Look through the mapping array. Pick the first name that either
20424 matches the argument, has a bit set in IF_SET that is also set
20425 in the target flags, or has a NULL name. */
20426
20427 i = 0;
20428 while (mapping[i].arg != NULL
20429 && strcmp (mapping[i].arg, cpu_id) != 0
20430 && (mapping[i].if_set & target_flags) == 0)
20431 i++;
20432
20433 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
20434}
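/* Worked examples of the mapping above (added commentary): "-mcpu=G4"
   and "-mcpu=7400" both emit "\t.machine ppc7400"; "-mcpu=970",
   "-mcpu=power4" and "-mcpu=G5" emit "\t.machine ppc970"; with no -mcpu
   at all, a 64-bit compilation matches the MASK_64BIT row and emits
   "\t.machine ppc64"; a CPU string not in the table with none of the
   IF_SET flags enabled falls through to the final "ppc" entry.  */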
20435
ee890fe2 20436#endif /* TARGET_MACHO */
7c262518
RH
20437
20438#if TARGET_ELF
9b580a0b
RH
20439static int
20440rs6000_elf_reloc_rw_mask (void)
7c262518 20441{
9b580a0b
RH
20442 if (flag_pic)
20443 return 3;
20444 else if (DEFAULT_ABI == ABI_AIX)
20445 return 2;
20446 else
20447 return 0;
7c262518 20448}
d9f6800d
RH
20449
20450/* Record an element in the table of global constructors. SYMBOL is
20451 a SYMBOL_REF of the function to be called; PRIORITY is a number
20452 between 0 and MAX_INIT_PRIORITY.
20453
20454 This differs from default_named_section_asm_out_constructor in
20455 that we have special handling for -mrelocatable. */
20456
20457static void
a2369ed3 20458rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
20459{
20460 const char *section = ".ctors";
20461 char buf[16];
20462
20463 if (priority != DEFAULT_INIT_PRIORITY)
20464 {
20465 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
20466 /* Invert the numbering so the linker puts us in the proper
20467 order; constructors are run from right to left, and the
20468 linker sorts in increasing order. */
20469 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20470 section = buf;
20471 }
20472
d6b5193b 20473 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20474 assemble_align (POINTER_SIZE);
d9f6800d
RH
20475
20476 if (TARGET_RELOCATABLE)
20477 {
20478 fputs ("\t.long (", asm_out_file);
20479 output_addr_const (asm_out_file, symbol);
20480 fputs (")@fixup\n", asm_out_file);
20481 }
20482 else
c8af3574 20483 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
20484}
20485
20486static void
a2369ed3 20487rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
20488{
20489 const char *section = ".dtors";
20490 char buf[16];
20491
20492 if (priority != DEFAULT_INIT_PRIORITY)
20493 {
20494 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
20495 /* Invert the numbering so the linker puts us in the proper
20496 order; constructors are run from right to left, and the
20497 linker sorts in increasing order. */
20498 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20499 section = buf;
20500 }
20501
d6b5193b 20502 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20503 assemble_align (POINTER_SIZE);
d9f6800d
RH
20504
20505 if (TARGET_RELOCATABLE)
20506 {
20507 fputs ("\t.long (", asm_out_file);
20508 output_addr_const (asm_out_file, symbol);
20509 fputs (")@fixup\n", asm_out_file);
20510 }
20511 else
c8af3574 20512 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 20513}
9739c90c
JJ
20514
20515void
a2369ed3 20516rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
20517{
20518 if (TARGET_64BIT)
20519 {
20520 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
20521 ASM_OUTPUT_LABEL (file, name);
20522 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
20523 rs6000_output_function_entry (file, name);
20524 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
20525 if (DOT_SYMBOLS)
9739c90c 20526 {
85b776df 20527 fputs ("\t.size\t", file);
9739c90c 20528 assemble_name (file, name);
85b776df
AM
20529 fputs (",24\n\t.type\t.", file);
20530 assemble_name (file, name);
20531 fputs (",@function\n", file);
20532 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
20533 {
20534 fputs ("\t.globl\t.", file);
20535 assemble_name (file, name);
20536 putc ('\n', file);
20537 }
9739c90c 20538 }
85b776df
AM
20539 else
20540 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 20541 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
20542 rs6000_output_function_entry (file, name);
20543 fputs (":\n", file);
9739c90c
JJ
20544 return;
20545 }
20546
20547 if (TARGET_RELOCATABLE
7f970b70 20548 && !TARGET_SECURE_PLT
9739c90c 20549 && (get_pool_size () != 0 || current_function_profile)
3c9eb5f4 20550 && uses_TOC ())
9739c90c
JJ
20551 {
20552 char buf[256];
20553
20554 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
20555
20556 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
20557 fprintf (file, "\t.long ");
20558 assemble_name (file, buf);
20559 putc ('-', file);
20560 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
20561 assemble_name (file, buf);
20562 putc ('\n', file);
20563 }
20564
20565 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
20566 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
20567
20568 if (DEFAULT_ABI == ABI_AIX)
20569 {
20570 const char *desc_name, *orig_name;
20571
20572 orig_name = (*targetm.strip_name_encoding) (name);
20573 desc_name = orig_name;
20574 while (*desc_name == '.')
20575 desc_name++;
20576
20577 if (TREE_PUBLIC (decl))
20578 fprintf (file, "\t.globl %s\n", desc_name);
20579
20580 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20581 fprintf (file, "%s:\n", desc_name);
20582 fprintf (file, "\t.long %s\n", orig_name);
20583 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
20584 if (DEFAULT_ABI == ABI_AIX)
20585 fputs ("\t.long 0\n", file);
20586 fprintf (file, "\t.previous\n");
20587 }
20588 ASM_OUTPUT_LABEL (file, name);
20589}
1334b570
AM
20590
20591static void
20592rs6000_elf_end_indicate_exec_stack (void)
20593{
20594 if (TARGET_32BIT)
20595 file_end_indicate_exec_stack ();
20596}
7c262518
RH
20597#endif
20598
cbaaba19 20599#if TARGET_XCOFF
0d5817b2
DE
20600static void
20601rs6000_xcoff_asm_output_anchor (rtx symbol)
20602{
20603 char buffer[100];
20604
20605 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
20606 SYMBOL_REF_BLOCK_OFFSET (symbol));
20607 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
20608}
20609
7c262518 20610static void
a2369ed3 20611rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
20612{
20613 fputs (GLOBAL_ASM_OP, stream);
20614 RS6000_OUTPUT_BASENAME (stream, name);
20615 putc ('\n', stream);
20616}
20617
d6b5193b
RS
20618/* A get_unnamed_section callback, used for read-only sections.  DIRECTIVE
20619 points to the section string variable. */
20620
20621static void
20622rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
20623{
890f9edf
OH
20624 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
20625 *(const char *const *) directive,
20626 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20627}
20628
20629/* Likewise for read-write sections. */
20630
20631static void
20632rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
20633{
890f9edf
OH
20634 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
20635 *(const char *const *) directive,
20636 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20637}
20638
20639/* A get_unnamed_section callback, used for switching to toc_section. */
20640
20641static void
20642rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20643{
20644 if (TARGET_MINIMAL_TOC)
20645 {
20646 /* toc_section is always selected at least once from
20647 rs6000_xcoff_file_start, so this is guaranteed to
20648 always be defined once and only once in each file. */
20649 if (!toc_initialized)
20650 {
20651 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
20652 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
20653 toc_initialized = 1;
20654 }
20655 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
20656 (TARGET_32BIT ? "" : ",3"));
20657 }
20658 else
20659 fputs ("\t.toc\n", asm_out_file);
20660}
20661
20662/* Implement TARGET_ASM_INIT_SECTIONS. */
20663
20664static void
20665rs6000_xcoff_asm_init_sections (void)
20666{
20667 read_only_data_section
20668 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20669 &xcoff_read_only_section_name);
20670
20671 private_data_section
20672 = get_unnamed_section (SECTION_WRITE,
20673 rs6000_xcoff_output_readwrite_section_asm_op,
20674 &xcoff_private_data_section_name);
20675
20676 read_only_private_data_section
20677 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20678 &xcoff_private_data_section_name);
20679
20680 toc_section
20681 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
20682
20683 readonly_data_section = read_only_data_section;
20684 exception_section = data_section;
20685}
20686
9b580a0b
RH
20687static int
20688rs6000_xcoff_reloc_rw_mask (void)
20689{
20690 return 3;
20691}
20692
b275d088 20693static void
c18a5b6c
MM
20694rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
20695 tree decl ATTRIBUTE_UNUSED)
7c262518 20696{
0e5dbd9b
DE
20697 int smclass;
20698 static const char * const suffix[3] = { "PR", "RO", "RW" };
20699
20700 if (flags & SECTION_CODE)
20701 smclass = 0;
20702 else if (flags & SECTION_WRITE)
20703 smclass = 2;
20704 else
20705 smclass = 1;
20706
5b5198f7 20707 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 20708 (flags & SECTION_CODE) ? "." : "",
5b5198f7 20709 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 20710}
ae46c4e0 20711
d6b5193b 20712static section *
f676971a 20713rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 20714 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 20715{
9b580a0b 20716 if (decl_readonly_section (decl, reloc))
ae46c4e0 20717 {
0e5dbd9b 20718 if (TREE_PUBLIC (decl))
d6b5193b 20719 return read_only_data_section;
ae46c4e0 20720 else
d6b5193b 20721 return read_only_private_data_section;
ae46c4e0
RH
20722 }
20723 else
20724 {
0e5dbd9b 20725 if (TREE_PUBLIC (decl))
d6b5193b 20726 return data_section;
ae46c4e0 20727 else
d6b5193b 20728 return private_data_section;
ae46c4e0
RH
20729 }
20730}
20731
20732static void
a2369ed3 20733rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
20734{
20735 const char *name;
ae46c4e0 20736
5b5198f7
DE
20737 /* Use select_section for private and uninitialized data. */
20738 if (!TREE_PUBLIC (decl)
20739 || DECL_COMMON (decl)
0e5dbd9b
DE
20740 || DECL_INITIAL (decl) == NULL_TREE
20741 || DECL_INITIAL (decl) == error_mark_node
20742 || (flag_zero_initialized_in_bss
20743 && initializer_zerop (DECL_INITIAL (decl))))
20744 return;
20745
20746 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
20747 name = (*targetm.strip_name_encoding) (name);
20748 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 20749}
b64a1b53 20750
fb49053f
RH
20751/* Select section for constant in constant pool.
20752
20753 On RS/6000, all constants are in the private read-only data area.
20754 However, if this is being placed in the TOC it must be output as a
20755 toc entry. */
20756
d6b5193b 20757static section *
f676971a 20758rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 20759 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
20760{
20761 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20762 return toc_section;
b64a1b53 20763 else
d6b5193b 20764 return read_only_private_data_section;
b64a1b53 20765}
772c5265
RH
20766
20767/* Remove any trailing [DS] or the like from the symbol name. */
20768
20769static const char *
a2369ed3 20770rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
20771{
20772 size_t len;
20773 if (*name == '*')
20774 name++;
20775 len = strlen (name);
20776 if (name[len - 1] == ']')
20777 return ggc_alloc_string (name, len - 4);
20778 else
20779 return name;
20780}
20781
5add3202
DE
20782/* Section attributes. AIX is always PIC. */
20783
20784static unsigned int
a2369ed3 20785rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 20786{
5b5198f7 20787 unsigned int align;
9b580a0b 20788 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
20789
20790 /* Align to at least UNIT size. */
20791 if (flags & SECTION_CODE)
20792 align = MIN_UNITS_PER_WORD;
20793 else
20794 /* Increase alignment of large objects if not already stricter. */
20795 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
20796 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
20797 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
20798
20799 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 20800}
a5fe455b 20801
1bc7c5b6
ZW
20802/* Output at beginning of assembler file.
20803
20804 Initialize the section names for the RS/6000 at this point.
20805
20806 Specify filename, including full path, to assembler.
20807
20808 We want to go into the TOC section so at least one .toc will be emitted.
20809 Also, in order to output proper .bs/.es pairs, we need at least one static
20810 [RW] section emitted.
20811
20812 Finally, declare mcount when profiling to make the assembler happy. */
20813
20814static void
863d938c 20815rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
20816{
20817 rs6000_gen_section_name (&xcoff_bss_section_name,
20818 main_input_filename, ".bss_");
20819 rs6000_gen_section_name (&xcoff_private_data_section_name,
20820 main_input_filename, ".rw_");
20821 rs6000_gen_section_name (&xcoff_read_only_section_name,
20822 main_input_filename, ".ro_");
20823
20824 fputs ("\t.file\t", asm_out_file);
20825 output_quoted_string (asm_out_file, main_input_filename);
20826 fputc ('\n', asm_out_file);
1bc7c5b6 20827 if (write_symbols != NO_DEBUG)
d6b5193b
RS
20828 switch_to_section (private_data_section);
20829 switch_to_section (text_section);
1bc7c5b6
ZW
20830 if (profile_flag)
20831 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
20832 rs6000_file_start ();
20833}
20834
a5fe455b
ZW
20835/* Output at end of assembler file.
20836 On the RS/6000, referencing data should automatically pull in text. */
20837
20838static void
863d938c 20839rs6000_xcoff_file_end (void)
a5fe455b 20840{
d6b5193b 20841 switch_to_section (text_section);
a5fe455b 20842 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 20843 switch_to_section (data_section);
a5fe455b
ZW
20844 fputs (TARGET_32BIT
20845 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
20846 asm_out_file);
20847}
f1384257 20848#endif /* TARGET_XCOFF */
0e5dbd9b 20849
3c50106f
RH
20850/* Compute a (partial) cost for rtx X. Return true if the complete
20851 cost has been computed, and false if subexpressions should be
20852 scanned. In either case, *TOTAL contains the cost result. */
20853
20854static bool
1494c534 20855rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 20856{
f0517163
RS
20857 enum machine_mode mode = GET_MODE (x);
20858
3c50106f
RH
20859 switch (code)
20860 {
30a555d9 20861 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 20862 case CONST_INT:
066cd967
DE
20863 if (((outer_code == SET
20864 || outer_code == PLUS
20865 || outer_code == MINUS)
279bb624
DE
20866 && (satisfies_constraint_I (x)
20867 || satisfies_constraint_L (x)))
066cd967 20868 || (outer_code == AND
279bb624
DE
20869 && (satisfies_constraint_K (x)
20870 || (mode == SImode
20871 ? satisfies_constraint_L (x)
20872 : satisfies_constraint_J (x))
1990cd79
AM
20873 || mask_operand (x, mode)
20874 || (mode == DImode
20875 && mask64_operand (x, DImode))))
22e54023 20876 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
20877 && (satisfies_constraint_K (x)
20878 || (mode == SImode
20879 ? satisfies_constraint_L (x)
20880 : satisfies_constraint_J (x))))
066cd967
DE
20881 || outer_code == ASHIFT
20882 || outer_code == ASHIFTRT
20883 || outer_code == LSHIFTRT
20884 || outer_code == ROTATE
20885 || outer_code == ROTATERT
d5861a7a 20886 || outer_code == ZERO_EXTRACT
066cd967 20887 || (outer_code == MULT
279bb624 20888 && satisfies_constraint_I (x))
22e54023
DE
20889 || ((outer_code == DIV || outer_code == UDIV
20890 || outer_code == MOD || outer_code == UMOD)
20891 && exact_log2 (INTVAL (x)) >= 0)
066cd967 20892 || (outer_code == COMPARE
279bb624
DE
20893 && (satisfies_constraint_I (x)
20894 || satisfies_constraint_K (x)))
22e54023 20895 || (outer_code == EQ
279bb624
DE
20896 && (satisfies_constraint_I (x)
20897 || satisfies_constraint_K (x)
20898 || (mode == SImode
20899 ? satisfies_constraint_L (x)
20900 : satisfies_constraint_J (x))))
22e54023 20901 || (outer_code == GTU
279bb624 20902 && satisfies_constraint_I (x))
22e54023 20903 || (outer_code == LTU
279bb624 20904 && satisfies_constraint_P (x)))
066cd967
DE
20905 {
20906 *total = 0;
20907 return true;
20908 }
20909 else if ((outer_code == PLUS
4ae234b0 20910 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 20911 || (outer_code == MINUS
4ae234b0 20912 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
20913 || ((outer_code == SET
20914 || outer_code == IOR
20915 || outer_code == XOR)
20916 && (INTVAL (x)
20917 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
20918 {
20919 *total = COSTS_N_INSNS (1);
20920 return true;
20921 }
20922 /* FALLTHRU */
20923
20924 case CONST_DOUBLE:
f6fe3a22 20925 if (mode == DImode && code == CONST_DOUBLE)
066cd967 20926 {
f6fe3a22
DE
20927 if ((outer_code == IOR || outer_code == XOR)
20928 && CONST_DOUBLE_HIGH (x) == 0
20929 && (CONST_DOUBLE_LOW (x)
20930 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
20931 {
20932 *total = 0;
20933 return true;
20934 }
20935 else if ((outer_code == AND && and64_2_operand (x, DImode))
20936 || ((outer_code == SET
20937 || outer_code == IOR
20938 || outer_code == XOR)
20939 && CONST_DOUBLE_HIGH (x) == 0))
20940 {
20941 *total = COSTS_N_INSNS (1);
20942 return true;
20943 }
066cd967
DE
20944 }
20945 /* FALLTHRU */
20946
3c50106f 20947 case CONST:
066cd967 20948 case HIGH:
3c50106f 20949 case SYMBOL_REF:
066cd967
DE
20950 case MEM:
20951 /* When optimizing for size, MEM should be slightly more expensive
20952 than generating address, e.g., (plus (reg) (const)).
c112cf2b 20953 L1 cache latency is about two instructions. */
066cd967 20954 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
20955 return true;
20956
30a555d9
DE
20957 case LABEL_REF:
20958 *total = 0;
20959 return true;
20960
3c50106f 20961 case PLUS:
f0517163 20962 if (mode == DFmode)
066cd967
DE
20963 {
20964 if (GET_CODE (XEXP (x, 0)) == MULT)
20965 {
20966 /* FNMA accounted in outer NEG. */
20967 if (outer_code == NEG)
20968 *total = rs6000_cost->dmul - rs6000_cost->fp;
20969 else
20970 *total = rs6000_cost->dmul;
20971 }
20972 else
20973 *total = rs6000_cost->fp;
20974 }
f0517163 20975 else if (mode == SFmode)
066cd967
DE
20976 {
20977 /* FNMA accounted in outer NEG. */
20978 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20979 *total = 0;
20980 else
20981 *total = rs6000_cost->fp;
20982 }
f0517163 20983 else
066cd967
DE
20984 *total = COSTS_N_INSNS (1);
20985 return false;
3c50106f 20986
52190329 20987 case MINUS:
f0517163 20988 if (mode == DFmode)
066cd967 20989 {
762c919f
JM
20990 if (GET_CODE (XEXP (x, 0)) == MULT
20991 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
20992 {
20993 /* FNMA accounted in outer NEG. */
20994 if (outer_code == NEG)
762c919f 20995 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
20996 else
20997 *total = rs6000_cost->dmul;
20998 }
20999 else
21000 *total = rs6000_cost->fp;
21001 }
f0517163 21002 else if (mode == SFmode)
066cd967
DE
21003 {
21004 /* FNMA accounted in outer NEG. */
21005 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21006 *total = 0;
21007 else
21008 *total = rs6000_cost->fp;
21009 }
f0517163 21010 else
c4ad648e 21011 *total = COSTS_N_INSNS (1);
066cd967 21012 return false;
3c50106f
RH
21013
21014 case MULT:
c9dbf840 21015 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 21016 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 21017 {
8b897cfa
RS
21018 if (INTVAL (XEXP (x, 1)) >= -256
21019 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 21020 *total = rs6000_cost->mulsi_const9;
8b897cfa 21021 else
06a67bdd 21022 *total = rs6000_cost->mulsi_const;
3c50106f 21023 }
066cd967
DE
21024 /* FMA accounted in outer PLUS/MINUS. */
21025 else if ((mode == DFmode || mode == SFmode)
21026 && (outer_code == PLUS || outer_code == MINUS))
21027 *total = 0;
f0517163 21028 else if (mode == DFmode)
06a67bdd 21029 *total = rs6000_cost->dmul;
f0517163 21030 else if (mode == SFmode)
06a67bdd 21031 *total = rs6000_cost->fp;
f0517163 21032 else if (mode == DImode)
06a67bdd 21033 *total = rs6000_cost->muldi;
8b897cfa 21034 else
06a67bdd 21035 *total = rs6000_cost->mulsi;
066cd967 21036 return false;
3c50106f
RH
21037
21038 case DIV:
21039 case MOD:
f0517163
RS
21040 if (FLOAT_MODE_P (mode))
21041 {
06a67bdd
RS
21042 *total = mode == DFmode ? rs6000_cost->ddiv
21043 : rs6000_cost->sdiv;
066cd967 21044 return false;
f0517163 21045 }
5efb1046 21046 /* FALLTHRU */
3c50106f
RH
21047
21048 case UDIV:
21049 case UMOD:
627b6fe2
DJ
21050 if (GET_CODE (XEXP (x, 1)) == CONST_INT
21051 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
21052 {
21053 if (code == DIV || code == MOD)
21054 /* Shift, addze */
21055 *total = COSTS_N_INSNS (2);
21056 else
21057 /* Shift */
21058 *total = COSTS_N_INSNS (1);
21059 }
c4ad648e 21060 else
627b6fe2
DJ
21061 {
21062 if (GET_MODE (XEXP (x, 1)) == DImode)
21063 *total = rs6000_cost->divdi;
21064 else
21065 *total = rs6000_cost->divsi;
21066 }
21067 /* Add in shift and subtract for MOD. */
21068 if (code == MOD || code == UMOD)
21069 *total += COSTS_N_INSNS (2);
066cd967 21070 return false;
3c50106f 21071
32f56aad 21072 case CTZ:
3c50106f
RH
21073 case FFS:
21074 *total = COSTS_N_INSNS (4);
066cd967 21075 return false;
3c50106f 21076
32f56aad
DE
21077 case POPCOUNT:
21078 *total = COSTS_N_INSNS (6);
21079 return false;
21080
06a67bdd 21081 case NOT:
066cd967
DE
21082 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
21083 {
21084 *total = 0;
21085 return false;
21086 }
21087 /* FALLTHRU */
21088
21089 case AND:
32f56aad 21090 case CLZ:
066cd967
DE
21091 case IOR:
21092 case XOR:
d5861a7a
DE
21093 case ZERO_EXTRACT:
21094 *total = COSTS_N_INSNS (1);
21095 return false;
21096
066cd967
DE
21097 case ASHIFT:
21098 case ASHIFTRT:
21099 case LSHIFTRT:
21100 case ROTATE:
21101 case ROTATERT:
d5861a7a 21102 /* Handle mul_highpart. */
066cd967
DE
21103 if (outer_code == TRUNCATE
21104 && GET_CODE (XEXP (x, 0)) == MULT)
21105 {
21106 if (mode == DImode)
21107 *total = rs6000_cost->muldi;
21108 else
21109 *total = rs6000_cost->mulsi;
21110 return true;
21111 }
d5861a7a
DE
21112 else if (outer_code == AND)
21113 *total = 0;
21114 else
21115 *total = COSTS_N_INSNS (1);
21116 return false;
21117
21118 case SIGN_EXTEND:
21119 case ZERO_EXTEND:
21120 if (GET_CODE (XEXP (x, 0)) == MEM)
21121 *total = 0;
21122 else
21123 *total = COSTS_N_INSNS (1);
066cd967 21124 return false;
06a67bdd 21125
066cd967
DE
21126 case COMPARE:
21127 case NEG:
21128 case ABS:
21129 if (!FLOAT_MODE_P (mode))
21130 {
21131 *total = COSTS_N_INSNS (1);
21132 return false;
21133 }
21134 /* FALLTHRU */
21135
21136 case FLOAT:
21137 case UNSIGNED_FLOAT:
21138 case FIX:
21139 case UNSIGNED_FIX:
06a67bdd
RS
21140 case FLOAT_TRUNCATE:
21141 *total = rs6000_cost->fp;
066cd967 21142 return false;
06a67bdd 21143
a2af5043
DJ
21144 case FLOAT_EXTEND:
21145 if (mode == DFmode)
21146 *total = 0;
21147 else
21148 *total = rs6000_cost->fp;
21149 return false;
21150
06a67bdd
RS
21151 case UNSPEC:
21152 switch (XINT (x, 1))
21153 {
21154 case UNSPEC_FRSP:
21155 *total = rs6000_cost->fp;
21156 return true;
21157
21158 default:
21159 break;
21160 }
21161 break;
21162
21163 case CALL:
21164 case IF_THEN_ELSE:
21165 if (optimize_size)
21166 {
21167 *total = COSTS_N_INSNS (1);
21168 return true;
21169 }
066cd967
DE
21170 else if (FLOAT_MODE_P (mode)
21171 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
21172 {
21173 *total = rs6000_cost->fp;
21174 return false;
21175 }
06a67bdd
RS
21176 break;
21177
c0600ecd
DE
21178 case EQ:
21179 case GTU:
21180 case LTU:
22e54023
DE
21181 /* Carry bit requires mode == Pmode.
21182 NEG or PLUS already counted so only add one. */
21183 if (mode == Pmode
21184 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 21185 {
22e54023
DE
21186 *total = COSTS_N_INSNS (1);
21187 return true;
21188 }
21189 if (outer_code == SET)
21190 {
21191 if (XEXP (x, 1) == const0_rtx)
c0600ecd 21192 {
22e54023 21193 *total = COSTS_N_INSNS (2);
c0600ecd 21194 return true;
c0600ecd 21195 }
22e54023
DE
21196 else if (mode == Pmode)
21197 {
21198 *total = COSTS_N_INSNS (3);
21199 return false;
21200 }
21201 }
21202 /* FALLTHRU */
21203
21204 case GT:
21205 case LT:
21206 case UNORDERED:
21207 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
21208 {
21209 *total = COSTS_N_INSNS (2);
21210 return true;
c0600ecd 21211 }
22e54023
DE
21212 /* CC COMPARE. */
21213 if (outer_code == COMPARE)
21214 {
21215 *total = 0;
21216 return true;
21217 }
21218 break;
c0600ecd 21219
3c50106f 21220 default:
06a67bdd 21221 break;
3c50106f 21222 }
06a67bdd
RS
21223
21224 return false;
3c50106f
RH
21225}
21226
34bb030a
DE
21227/* A C expression returning the cost of moving data from a register of class
21228 CLASS1 to one of CLASS2. */
21229
21230int
f676971a 21231rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 21232 enum reg_class from, enum reg_class to)
34bb030a
DE
21233{
21234 /* Moves from/to GENERAL_REGS. */
21235 if (reg_classes_intersect_p (to, GENERAL_REGS)
21236 || reg_classes_intersect_p (from, GENERAL_REGS))
21237 {
21238 if (! reg_classes_intersect_p (to, GENERAL_REGS))
21239 from = to;
21240
21241 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
21242 return (rs6000_memory_move_cost (mode, from, 0)
21243 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
21244
c4ad648e
AM
21245 /* It's more expensive to move CR_REGS than CR0_REGS because of the
21246 shift. */
34bb030a
DE
21247 else if (from == CR_REGS)
21248 return 4;
21249
21250 else
c4ad648e 21251 /* A move will cost one instruction per GPR moved. */
c8b622ff 21252 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
21253 }
21254
c4ad648e 21255 /* Moving between two similar registers is just one instruction. */
34bb030a 21256 else if (reg_classes_intersect_p (to, from))
7393f7f8 21257 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 21258
c4ad648e 21259 /* Everything else has to go through GENERAL_REGS. */
34bb030a 21260 else
f676971a 21261 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
21262 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
21263}
21264
21265/* A C expression returning the cost of moving data of MODE from a register to
21266 or from memory. */
21267
21268int
f676971a 21269rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
a2369ed3 21270 int in ATTRIBUTE_UNUSED)
34bb030a
DE
21271{
21272 if (reg_classes_intersect_p (class, GENERAL_REGS))
c8b622ff 21273 return 4 * hard_regno_nregs[0][mode];
34bb030a 21274 else if (reg_classes_intersect_p (class, FLOAT_REGS))
c8b622ff 21275 return 4 * hard_regno_nregs[32][mode];
34bb030a 21276 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
c8b622ff 21277 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a
DE
21278 else
21279 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
21280}
21281
9c78b944
DE
21282/* Return the decl of a target-specific builtin that implements the
21283 reciprocal of the given function, or NULL_TREE if none is available. */
21284
21285static tree
21286rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
21287 bool sqrt ATTRIBUTE_UNUSED)
21288{
21289 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
21290 && flag_finite_math_only && !flag_trapping_math
21291 && flag_unsafe_math_optimizations))
21292 return NULL_TREE;
21293
21294 if (md_fn)
21295 return NULL_TREE;
21296 else
21297 switch (fn)
21298 {
21299 case BUILT_IN_SQRTF:
21300 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
21301
21302 default:
21303 return NULL_TREE;
21304 }
21305}
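/* Illustration (assumed command-line flags matching the condition
   tested above, e.g. -ffast-math -mrecip -mpowerpc-gfxopt): for

     float inv_norm (float x) { return 1.0f / sqrtf (x); }

   the tree optimizers query this hook, substitute the
   RS6000_BUILTIN_RSQRTF decl for sqrtf, and the whole expression then
   expands through rs6000_emit_swrsqrtsf below instead of fsqrts
   followed by fdivs.  */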
21306
ef765ea9
DE
21307/* Newton-Raphson approximation of single-precision floating point divide n/d.
21308 Assumes no trapping math and finite arguments. */
21309
21310void
9c78b944 21311rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21312{
21313 rtx x0, e0, e1, y1, u0, v0, one;
21314
21315 x0 = gen_reg_rtx (SFmode);
21316 e0 = gen_reg_rtx (SFmode);
21317 e1 = gen_reg_rtx (SFmode);
21318 y1 = gen_reg_rtx (SFmode);
21319 u0 = gen_reg_rtx (SFmode);
21320 v0 = gen_reg_rtx (SFmode);
21321 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21322
21323 /* x0 = 1./d estimate */
21324 emit_insn (gen_rtx_SET (VOIDmode, x0,
21325 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
21326 UNSPEC_FRES)));
21327 /* e0 = 1. - d * x0 */
21328 emit_insn (gen_rtx_SET (VOIDmode, e0,
21329 gen_rtx_MINUS (SFmode, one,
21330 gen_rtx_MULT (SFmode, d, x0))));
21331 /* e1 = e0 + e0 * e0 */
21332 emit_insn (gen_rtx_SET (VOIDmode, e1,
21333 gen_rtx_PLUS (SFmode,
21334 gen_rtx_MULT (SFmode, e0, e0), e0)));
21335 /* y1 = x0 + e1 * x0 */
21336 emit_insn (gen_rtx_SET (VOIDmode, y1,
21337 gen_rtx_PLUS (SFmode,
21338 gen_rtx_MULT (SFmode, e1, x0), x0)));
21339 /* u0 = n * y1 */
21340 emit_insn (gen_rtx_SET (VOIDmode, u0,
21341 gen_rtx_MULT (SFmode, n, y1)));
21342 /* v0 = n - d * u0 */
21343 emit_insn (gen_rtx_SET (VOIDmode, v0,
21344 gen_rtx_MINUS (SFmode, n,
21345 gen_rtx_MULT (SFmode, d, u0))));
9c78b944
DE
21346 /* dst = u0 + v0 * y1 */
21347 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21348 gen_rtx_PLUS (SFmode,
21349 gen_rtx_MULT (SFmode, v0, y1), u0)));
21350}
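/* Error analysis of the sequence above (added commentary, not a change
   to the algorithm): write the fres estimate as x0 = (1 - e)/d.  Then
   e0 = 1 - d*x0 = e and e1 = e + e*e, so

     y1 = x0 + e1*x0 = (1 - e)*(1 + e + e*e)/d = (1 - e*e*e)/d,

   i.e. the two fused steps cube the relative error of the estimate.
   With u0 = n*y1 and v0 = n - d*u0 = n*e*e*e, the final step gives

     dst = u0 + v0*y1 = n*(1 - e*e*e*e*e*e)/d,

   comfortably below single-precision rounding error for the roughly
   2**-8 relative accuracy that fres guarantees.  */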
21351
21352/* Newton-Raphson approximation of double-precision floating point divide n/d.
21353 Assumes no trapping math and finite arguments. */
21354
21355void
9c78b944 21356rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21357{
21358 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
21359
21360 x0 = gen_reg_rtx (DFmode);
21361 e0 = gen_reg_rtx (DFmode);
21362 e1 = gen_reg_rtx (DFmode);
21363 e2 = gen_reg_rtx (DFmode);
21364 y1 = gen_reg_rtx (DFmode);
21365 y2 = gen_reg_rtx (DFmode);
21366 y3 = gen_reg_rtx (DFmode);
21367 u0 = gen_reg_rtx (DFmode);
21368 v0 = gen_reg_rtx (DFmode);
21369 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
21370
21371 /* x0 = 1./d estimate */
21372 emit_insn (gen_rtx_SET (VOIDmode, x0,
21373 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
21374 UNSPEC_FRES)));
21375 /* e0 = 1. - d * x0 */
21376 emit_insn (gen_rtx_SET (VOIDmode, e0,
21377 gen_rtx_MINUS (DFmode, one,
21378				 gen_rtx_MULT (DFmode, d, x0))));
21379 /* y1 = x0 + e0 * x0 */
21380 emit_insn (gen_rtx_SET (VOIDmode, y1,
21381 gen_rtx_PLUS (DFmode,
21382 gen_rtx_MULT (DFmode, e0, x0), x0)));
21383 /* e1 = e0 * e0 */
21384 emit_insn (gen_rtx_SET (VOIDmode, e1,
21385 gen_rtx_MULT (DFmode, e0, e0)));
21386 /* y2 = y1 + e1 * y1 */
21387 emit_insn (gen_rtx_SET (VOIDmode, y2,
21388 gen_rtx_PLUS (DFmode,
21389 gen_rtx_MULT (DFmode, e1, y1), y1)));
21390 /* e2 = e1 * e1 */
21391 emit_insn (gen_rtx_SET (VOIDmode, e2,
21392 gen_rtx_MULT (DFmode, e1, e1)));
21393 /* y3 = y2 + e2 * y2 */
21394 emit_insn (gen_rtx_SET (VOIDmode, y3,
21395 gen_rtx_PLUS (DFmode,
21396 gen_rtx_MULT (DFmode, e2, y2), y2)));
21397 /* u0 = n * y3 */
21398 emit_insn (gen_rtx_SET (VOIDmode, u0,
21399 gen_rtx_MULT (DFmode, n, y3)));
21400 /* v0 = n - d * u0 */
21401 emit_insn (gen_rtx_SET (VOIDmode, v0,
21402 gen_rtx_MINUS (DFmode, n,
21403 gen_rtx_MULT (DFmode, d, u0))));
9c78b944
DE
21404 /* dst = u0 + v0 * y3 */
21405 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21406 gen_rtx_PLUS (DFmode,
21407 gen_rtx_MULT (DFmode, v0, y3), u0)));
21408}
21409
565ef4ba 21410
9c78b944
DE
21411/* Newton-Raphson approximation of single-precision floating point rsqrt.
21412 Assumes no trapping math and finite arguments. */
21413
21414void
21415rs6000_emit_swrsqrtsf (rtx dst, rtx src)
21416{
21417 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
21418 half, one, halfthree, c1, cond, label;
21419
21420 x0 = gen_reg_rtx (SFmode);
21421 x1 = gen_reg_rtx (SFmode);
21422 x2 = gen_reg_rtx (SFmode);
21423 y1 = gen_reg_rtx (SFmode);
21424 u0 = gen_reg_rtx (SFmode);
21425 u1 = gen_reg_rtx (SFmode);
21426 u2 = gen_reg_rtx (SFmode);
21427 v0 = gen_reg_rtx (SFmode);
21428 v1 = gen_reg_rtx (SFmode);
21429 v2 = gen_reg_rtx (SFmode);
21430 t0 = gen_reg_rtx (SFmode);
21431 halfthree = gen_reg_rtx (SFmode);
21432 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
21433 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
21434
21435 /* check 0.0, 1.0, NaN, Inf by testing src * src = src */
21436 emit_insn (gen_rtx_SET (VOIDmode, t0,
21437 gen_rtx_MULT (SFmode, src, src)));
21438
21439 emit_insn (gen_rtx_SET (VOIDmode, cond,
21440 gen_rtx_COMPARE (CCFPmode, t0, src)));
21441 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
21442 emit_unlikely_jump (c1, label);
21443
21444 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
21445 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21446
21447 /* halfthree = 1.5 = 1.0 + 0.5 */
21448 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
21449 gen_rtx_PLUS (SFmode, one, half)));
21450
21451 /* x0 = rsqrt estimate */
21452 emit_insn (gen_rtx_SET (VOIDmode, x0,
21453 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
21454 UNSPEC_RSQRT)));
21455
21456 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
21457 emit_insn (gen_rtx_SET (VOIDmode, y1,
21458 gen_rtx_MINUS (SFmode,
21459 gen_rtx_MULT (SFmode, src, halfthree),
21460 src)));
21461
21462 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
21463 emit_insn (gen_rtx_SET (VOIDmode, u0,
21464 gen_rtx_MULT (SFmode, x0, x0)));
21465 emit_insn (gen_rtx_SET (VOIDmode, v0,
21466 gen_rtx_MINUS (SFmode,
21467 halfthree,
21468 gen_rtx_MULT (SFmode, y1, u0))));
21469 emit_insn (gen_rtx_SET (VOIDmode, x1,
21470 gen_rtx_MULT (SFmode, x0, v0)));
21471
21472 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
21473 emit_insn (gen_rtx_SET (VOIDmode, u1,
21474 gen_rtx_MULT (SFmode, x1, x1)));
21475 emit_insn (gen_rtx_SET (VOIDmode, v1,
21476 gen_rtx_MINUS (SFmode,
21477 halfthree,
21478 gen_rtx_MULT (SFmode, y1, u1))));
21479 emit_insn (gen_rtx_SET (VOIDmode, x2,
21480 gen_rtx_MULT (SFmode, x1, v1)));
21481
21482 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
21483 emit_insn (gen_rtx_SET (VOIDmode, u2,
21484 gen_rtx_MULT (SFmode, x2, x2)));
21485 emit_insn (gen_rtx_SET (VOIDmode, v2,
21486 gen_rtx_MINUS (SFmode,
21487 halfthree,
21488 gen_rtx_MULT (SFmode, y1, u2))));
21489 emit_insn (gen_rtx_SET (VOIDmode, dst,
21490 gen_rtx_MULT (SFmode, x2, v2)));
21491
21492 emit_label (XEXP (label, 0));
21493}
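/* Added commentary on the loop-free refinement above: each of the three
   steps has the classical Newton-Raphson form for 1/sqrt(src),

     x_{k+1} = x_k * (1.5 - (0.5 * src) * x_k * x_k),

   with y1 = 0.5*src formed once as 1.5*src - src so that only the
   halfthree constant is needed.  Every step roughly squares the relative
   error, so the ~5-bit frsqrte estimate is refined well past the 24-bit
   significand.  The initial src*src == src comparison detects the special
   inputs noted in the comment above and branches past the refinement
   steps.  */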
21494
565ef4ba
RS
21495/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
21496 target, and SRC is the argument operand. */
21497
21498void
21499rs6000_emit_popcount (rtx dst, rtx src)
21500{
21501 enum machine_mode mode = GET_MODE (dst);
21502 rtx tmp1, tmp2;
21503
21504 tmp1 = gen_reg_rtx (mode);
21505
21506 if (mode == SImode)
21507 {
21508 emit_insn (gen_popcntbsi2 (tmp1, src));
21509 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
21510 NULL_RTX, 0);
21511 tmp2 = force_reg (SImode, tmp2);
21512 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
21513 }
21514 else
21515 {
21516 emit_insn (gen_popcntbdi2 (tmp1, src));
21517 tmp2 = expand_mult (DImode, tmp1,
21518 GEN_INT ((HOST_WIDE_INT)
21519 0x01010101 << 32 | 0x01010101),
21520 NULL_RTX, 0);
21521 tmp2 = force_reg (DImode, tmp2);
21522 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
21523 }
21524}
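/* Worked example for the SImode path above (added commentary): popcntb
   produces a population count per byte, multiplying by 0x01010101 adds
   the four byte counts into the most significant byte, and the shift by
   24 extracts that sum.  For src = 0xF0F00003:

     popcntbsi2   -> 0x04040002      (per-byte counts 4, 4, 0, 2)
     * 0x01010101 -> top byte = 4+4+0+2 = 10
     >> 24        -> 10, the population count of src.

   No byte count exceeds 8 and there are only four bytes, so the partial
   sums never carry out of a byte.  */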
21525
21526
21527/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
21528 target, and SRC is the argument operand. */
21529
21530void
21531rs6000_emit_parity (rtx dst, rtx src)
21532{
21533 enum machine_mode mode = GET_MODE (dst);
21534 rtx tmp;
21535
21536 tmp = gen_reg_rtx (mode);
21537 if (mode == SImode)
21538 {
21539 /* Is mult+shift >= shift+xor+shift+xor? */
21540 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
21541 {
21542 rtx tmp1, tmp2, tmp3, tmp4;
21543
21544 tmp1 = gen_reg_rtx (SImode);
21545 emit_insn (gen_popcntbsi2 (tmp1, src));
21546
21547 tmp2 = gen_reg_rtx (SImode);
21548 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
21549 tmp3 = gen_reg_rtx (SImode);
21550 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
21551
21552 tmp4 = gen_reg_rtx (SImode);
21553 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
21554 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
21555 }
21556 else
21557 rs6000_emit_popcount (tmp, src);
21558 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
21559 }
21560 else
21561 {
21562 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
21563 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
21564 {
21565 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
21566
21567 tmp1 = gen_reg_rtx (DImode);
21568 emit_insn (gen_popcntbdi2 (tmp1, src));
21569
21570 tmp2 = gen_reg_rtx (DImode);
21571 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
21572 tmp3 = gen_reg_rtx (DImode);
21573 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
21574
21575 tmp4 = gen_reg_rtx (DImode);
21576 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
21577 tmp5 = gen_reg_rtx (DImode);
21578 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
21579
21580 tmp6 = gen_reg_rtx (DImode);
21581 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
21582 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
21583 }
21584 else
21585 rs6000_emit_popcount (tmp, src);
21586 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
21587 }
21588}
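/* An illustrative C model of the SImode shift/xor sequence above, chosen
   when a multiply would cost at least as much: fold the per-byte counts
   from a hypothetical `popcntb' helper together and keep the low bit.
   The DImode variant starts with one additional fold by 32.  */
static unsigned int
parity_model (unsigned int src, unsigned int (*popcntb) (unsigned int))
{
  unsigned int t = popcntb (src);   /* per-byte bit counts */

  t ^= t >> 16;                     /* fold the two halfwords together */
  t ^= t >> 8;                      /* fold the remaining bytes together */
  return t & 1;                     /* parity of the total bit count */
}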
21589
ded9bf77
AH
21590/* Return an RTX representing where to find the function value of a
21591 function returning MODE. */
21592static rtx
21593rs6000_complex_function_value (enum machine_mode mode)
21594{
21595 unsigned int regno;
21596 rtx r1, r2;
21597 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 21598 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 21599
18f63bfa
AH
21600 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21601 regno = FP_ARG_RETURN;
354ed18f
AH
21602 else
21603 {
18f63bfa 21604 regno = GP_ARG_RETURN;
ded9bf77 21605
18f63bfa
AH
21606 /* 32-bit is OK since it'll go in r3/r4. */
21607 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
21608 return gen_rtx_REG (mode, regno);
21609 }
21610
18f63bfa
AH
21611 if (inner_bytes >= 8)
21612 return gen_rtx_REG (mode, regno);
21613
ded9bf77
AH
21614 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
21615 const0_rtx);
21616 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 21617 GEN_INT (inner_bytes));
ded9bf77
AH
21618 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
21619}
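/* An illustrative sketch of the two-register description built above,
   written with GNU C's __real__/__imag__ operators; the struct and
   function names are invented for the example.  The real part is the
   piece at byte offset 0 in the first register, the imaginary part the
   piece at offset inner_bytes in the following register.  */
struct complex_value_pieces { float in_regno; float in_regno_plus_1; };

static struct complex_value_pieces
complex_return_pieces (_Complex float value)
{
  struct complex_value_pieces p;

  p.in_regno = __real__ value;            /* EXPR_LIST piece at offset 0 */
  p.in_regno_plus_1 = __imag__ value;     /* EXPR_LIST piece at offset inner_bytes */
  return p;
}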
21620
a6ebc39a
AH
21621/* Define how to find the value returned by a function.
21622 VALTYPE is the data type of the value (as a tree).
21623 If the precise function being called is known, FUNC is its FUNCTION_DECL;
21624 otherwise, FUNC is 0.
21625
21626 On the SPE, both FPs and vectors are returned in r3.
21627
21628 On RS/6000 an integer value is in r3 and a floating-point value is in
21629 fp1, unless -msoft-float. */
21630
21631rtx
586de218 21632rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
21633{
21634 enum machine_mode mode;
2a8fa26c 21635 unsigned int regno;
a6ebc39a 21636
594a51fe
SS
21637 /* Special handling for structs in darwin64. */
21638 if (rs6000_darwin64_abi
21639 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
21640 && TREE_CODE (valtype) == RECORD_TYPE
21641 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
21642 {
21643 CUMULATIVE_ARGS valcum;
21644 rtx valret;
21645
0b5383eb 21646 valcum.words = 0;
594a51fe
SS
21647 valcum.fregno = FP_ARG_MIN_REG;
21648 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
21649 /* Do a trial code generation as if this were going to be passed as
21650 an argument; if any part goes in memory, we return NULL. */
21651 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
21652 if (valret)
21653 return valret;
21654 /* Otherwise fall through to standard ABI rules. */
21655 }
21656
0e67400a
FJ
21657 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
21658 {
 21659	      /* A long long return value needs to be split under the 32-bit ABI with -mpowerpc64.  */
21660 return gen_rtx_PARALLEL (DImode,
21661 gen_rtvec (2,
21662 gen_rtx_EXPR_LIST (VOIDmode,
21663 gen_rtx_REG (SImode, GP_ARG_RETURN),
21664 const0_rtx),
21665 gen_rtx_EXPR_LIST (VOIDmode,
21666 gen_rtx_REG (SImode,
21667 GP_ARG_RETURN + 1),
21668 GEN_INT (4))));
21669 }
0f086e42
FJ
21670 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
21671 {
21672 return gen_rtx_PARALLEL (DCmode,
21673 gen_rtvec (4,
21674 gen_rtx_EXPR_LIST (VOIDmode,
21675 gen_rtx_REG (SImode, GP_ARG_RETURN),
21676 const0_rtx),
21677 gen_rtx_EXPR_LIST (VOIDmode,
21678 gen_rtx_REG (SImode,
21679 GP_ARG_RETURN + 1),
21680 GEN_INT (4)),
21681 gen_rtx_EXPR_LIST (VOIDmode,
21682 gen_rtx_REG (SImode,
21683 GP_ARG_RETURN + 2),
21684 GEN_INT (8)),
21685 gen_rtx_EXPR_LIST (VOIDmode,
21686 gen_rtx_REG (SImode,
21687 GP_ARG_RETURN + 3),
21688 GEN_INT (12))));
21689 }
602ea4d3 21690
7348aa7f
FXC
21691 mode = TYPE_MODE (valtype);
21692 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 21693 || POINTER_TYPE_P (valtype))
b78d48dd 21694 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 21695
e41b2a33
PB
21696 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21697 /* _Decimal128 must use an even/odd register pair. */
21698 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
00b79d54 21699 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 21700 regno = FP_ARG_RETURN;
ded9bf77 21701 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 21702 && targetm.calls.split_complex_arg)
ded9bf77 21703 return rs6000_complex_function_value (mode);
44688022 21704 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 21705 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 21706 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 21707 regno = ALTIVEC_ARG_RETURN;
18f63bfa 21708 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4d4447b5
PB
21709 && (mode == DFmode || mode == DDmode || mode == DCmode
21710 || mode == TFmode || mode == TDmode || mode == TCmode))
18f63bfa 21711 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
21712 else
21713 regno = GP_ARG_RETURN;
21714
21715 return gen_rtx_REG (mode, regno);
21716}
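/* An illustrative sketch of the DImode case above for the 32-bit ABI with
   -mpowerpc64: the 64-bit value is described as two SImode pieces at byte
   offsets 0 and 4, held in GP_ARG_RETURN and GP_ARG_RETURN + 1.  On a
   big-endian target the piece at offset 0 is the high word.  The helper
   is invented for the example.  */
struct dimode_value_pieces { unsigned int at_offset_0; unsigned int at_offset_4; };

static struct dimode_value_pieces
dimode_return_pieces (unsigned long long value)
{
  struct dimode_value_pieces p;

  p.at_offset_0 = (unsigned int) (value >> 32);   /* high word, GP_ARG_RETURN */
  p.at_offset_4 = (unsigned int) value;           /* low word, GP_ARG_RETURN + 1 */
  return p;
}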
21717
ded9bf77
AH
21718/* Define how to find the value returned by a library function
21719 assuming the value has mode MODE. */
21720rtx
21721rs6000_libcall_value (enum machine_mode mode)
21722{
21723 unsigned int regno;
21724
2e6c9641
FJ
21725 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
21726 {
 21727	      /* A long long return value needs to be split under the 32-bit ABI with -mpowerpc64.  */
21728 return gen_rtx_PARALLEL (DImode,
21729 gen_rtvec (2,
21730 gen_rtx_EXPR_LIST (VOIDmode,
21731 gen_rtx_REG (SImode, GP_ARG_RETURN),
21732 const0_rtx),
21733 gen_rtx_EXPR_LIST (VOIDmode,
21734 gen_rtx_REG (SImode,
21735 GP_ARG_RETURN + 1),
21736 GEN_INT (4))));
21737 }
21738
e41b2a33
PB
21739 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21740 /* _Decimal128 must use an even/odd register pair. */
21741 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
00b79d54 21742 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
21743 && TARGET_HARD_FLOAT && TARGET_FPRS)
21744 regno = FP_ARG_RETURN;
44688022
AM
21745 else if (ALTIVEC_VECTOR_MODE (mode)
21746 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 21747 regno = ALTIVEC_ARG_RETURN;
42ba5130 21748 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 21749 return rs6000_complex_function_value (mode);
18f63bfa 21750 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4d4447b5
PB
21751 && (mode == DFmode || mode == DDmode || mode == DCmode
21752 || mode == TFmode || mode == TDmode || mode == TCmode))
18f63bfa 21753 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
21754 else
21755 regno = GP_ARG_RETURN;
21756
21757 return gen_rtx_REG (mode, regno);
21758}
21759
d1d0c603
JJ
 21760	/* Define the offset between two registers, one to be eliminated (FROM)
 21761	   and its replacement (TO), at the start of a routine.  */
21762HOST_WIDE_INT
21763rs6000_initial_elimination_offset (int from, int to)
21764{
21765 rs6000_stack_t *info = rs6000_stack_info ();
21766 HOST_WIDE_INT offset;
21767
7d5175e1 21768 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 21769 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
21770 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
21771 {
21772 offset = info->push_p ? 0 : -info->total_size;
21773 if (FRAME_GROWS_DOWNWARD)
5b667039 21774 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
21775 }
21776 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
21777 offset = FRAME_GROWS_DOWNWARD
5b667039 21778 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
21779 : 0;
21780 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
21781 offset = info->total_size;
21782 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
21783 offset = info->push_p ? info->total_size : 0;
21784 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
21785 offset = 0;
21786 else
37409796 21787 gcc_unreachable ();
d1d0c603
JJ
21788
21789 return offset;
21790}
21791
58646b77 21792/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 21793
c8e4f0e9 21794static bool
3101faab 21795rs6000_is_opaque_type (const_tree type)
62e1dfcf 21796{
58646b77 21797 return (type == opaque_V2SI_type_node
2abe3e28 21798 || type == opaque_V2SF_type_node
58646b77
PB
21799 || type == opaque_p_V2SI_type_node
21800 || type == opaque_V4SI_type_node);
62e1dfcf
NC
21801}
21802
96714395 21803static rtx
a2369ed3 21804rs6000_dwarf_register_span (rtx reg)
96714395
AH
21805{
21806 unsigned regno;
21807
4d4cbc0e
AH
21808 if (TARGET_SPE
21809 && (SPE_VECTOR_MODE (GET_MODE (reg))
4d4447b5
PB
21810 || (TARGET_E500_DOUBLE
21811 && (GET_MODE (reg) == DFmode || GET_MODE (reg) == DDmode))))
4d4cbc0e
AH
21812 ;
21813 else
96714395
AH
21814 return NULL_RTX;
21815
21816 regno = REGNO (reg);
21817
21818 /* The duality of the SPE register size wreaks all kinds of havoc.
21819 This is a way of distinguishing r0 in 32-bits from r0 in
21820 64-bits. */
21821 return
21822 gen_rtx_PARALLEL (VOIDmode,
3bd104d1
AH
21823 BYTES_BIG_ENDIAN
21824 ? gen_rtvec (2,
21825 gen_rtx_REG (SImode, regno + 1200),
21826 gen_rtx_REG (SImode, regno))
21827 : gen_rtvec (2,
21828 gen_rtx_REG (SImode, regno),
21829 gen_rtx_REG (SImode, regno + 1200)));
96714395
AH
21830}
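/* An illustrative sketch of the span built above: for an SPE 64-bit
   register the unwinder sees two SImode pieces, the synthetic high-part
   register 1200 + N and the ordinary GPR N, with the high part listed
   first on a big-endian target.  The struct is invented for the example.  */
struct spe_span_pieces { unsigned int first_regno; unsigned int second_regno; };

static struct spe_span_pieces
spe_span_model (unsigned int regno, int bytes_big_endian)
{
  struct spe_span_pieces s;

  s.first_regno = bytes_big_endian ? regno + 1200 : regno;
  s.second_regno = bytes_big_endian ? regno : regno + 1200;
  return s;
}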
21831
37ea0b7e
JM
 21832	/* Fill in sizes for the SPE register high parts in the table used by the unwinder.  */
21833
21834static void
21835rs6000_init_dwarf_reg_sizes_extra (tree address)
21836{
21837 if (TARGET_SPE)
21838 {
21839 int i;
21840 enum machine_mode mode = TYPE_MODE (char_type_node);
21841 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
21842 rtx mem = gen_rtx_MEM (BLKmode, addr);
21843 rtx value = gen_int_mode (4, mode);
21844
21845 for (i = 1201; i < 1232; i++)
21846 {
21847 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
21848 HOST_WIDE_INT offset
21849 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
21850
21851 emit_move_insn (adjust_address (mem, mode, offset), value);
21852 }
21853 }
21854}
21855
93c9d1ba
AM
21856/* Map internal gcc register numbers to DWARF2 register numbers. */
21857
21858unsigned int
21859rs6000_dbx_register_number (unsigned int regno)
21860{
21861 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
21862 return regno;
21863 if (regno == MQ_REGNO)
21864 return 100;
1de43f85 21865 if (regno == LR_REGNO)
93c9d1ba 21866 return 108;
1de43f85 21867 if (regno == CTR_REGNO)
93c9d1ba
AM
21868 return 109;
21869 if (CR_REGNO_P (regno))
21870 return regno - CR0_REGNO + 86;
21871 if (regno == XER_REGNO)
21872 return 101;
21873 if (ALTIVEC_REGNO_P (regno))
21874 return regno - FIRST_ALTIVEC_REGNO + 1124;
21875 if (regno == VRSAVE_REGNO)
21876 return 356;
21877 if (regno == VSCR_REGNO)
21878 return 67;
21879 if (regno == SPE_ACC_REGNO)
21880 return 99;
21881 if (regno == SPEFSCR_REGNO)
21882 return 612;
21883 /* SPE high reg number. We get these values of regno from
21884 rs6000_dwarf_register_span. */
37409796
NS
21885 gcc_assert (regno >= 1200 && regno < 1232);
21886 return regno;
93c9d1ba
AM
21887}
21888
93f90be6 21889	/* Target hook for eh_return_filter_mode.  */
f676971a 21890static enum machine_mode
93f90be6
FJ
21891rs6000_eh_return_filter_mode (void)
21892{
21893 return TARGET_32BIT ? SImode : word_mode;
21894}
21895
00b79d54
BE
21896/* Target hook for scalar_mode_supported_p. */
21897static bool
21898rs6000_scalar_mode_supported_p (enum machine_mode mode)
21899{
21900 if (DECIMAL_FLOAT_MODE_P (mode))
21901 return true;
21902 else
21903 return default_scalar_mode_supported_p (mode);
21904}
21905
f676971a
EC
21906/* Target hook for vector_mode_supported_p. */
21907static bool
21908rs6000_vector_mode_supported_p (enum machine_mode mode)
21909{
21910
96038623
DE
21911 if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
21912 return true;
21913
f676971a
EC
21914 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
21915 return true;
21916
21917 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
21918 return true;
21919
21920 else
21921 return false;
21922}
21923
bb8df8a6
EC
21924/* Target hook for invalid_arg_for_unprototyped_fn. */
21925static const char *
3101faab 21926invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
4d3e6fae
FJ
21927{
21928 return (!rs6000_darwin64_abi
21929 && typelist == 0
21930 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
21931 && (funcdecl == NULL_TREE
21932 || (TREE_CODE (funcdecl) == FUNCTION_DECL
21933 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
21934 ? N_("AltiVec argument passed to unprototyped function")
21935 : NULL;
21936}
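/* Illustrative user-level code, not part of this file, that the hook
   above diagnoses; it assumes a PowerPC compile with -maltivec and a
   non-darwin64 ABI, and the identifiers are invented for the example.  */
extern void unprototyped_callee ();     /* declared without a prototype */

void
pass_vector_to_unprototyped (void)
{
  __vector int v = { 1, 2, 3, 4 };
  unprototyped_callee (v);   /* "AltiVec argument passed to unprototyped function" */
}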
21937
3aebbe5f
JJ
 21938	/* For TARGET_SECURE_PLT 32-bit PIC code we can avoid the PIC register
 21939	   setup by calling the hidden function __stack_chk_fail_local instead
 21940	   of calling __stack_chk_fail directly.  Otherwise it is better to
 21941	   call __stack_chk_fail directly.  */
21942
21943static tree
21944rs6000_stack_protect_fail (void)
21945{
21946 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
21947 ? default_hidden_stack_protect_fail ()
21948 : default_external_stack_protect_fail ();
21949}
21950
17211ab5 21951#include "gt-rs6000.h"