9878760c 1/* Subroutines used for code generation on IBM RS/6000.
f676971a 2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
8ef65e3d 3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
337bde91 4 Free Software Foundation, Inc.
fab3bcc3 5 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
9878760c 6
5de601cf 7 This file is part of GCC.
9878760c 8
5de601cf
NC
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
2f83c7d6 11 by the Free Software Foundation; either version 3, or (at your
5de601cf 12 option) any later version.
9878760c 13
5de601cf
NC
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
9878760c 18
5de601cf 19 You should have received a copy of the GNU General Public License
2f83c7d6
NC
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
9878760c 22
956d6950 23#include "config.h"
c4d38ccb 24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
9878760c
RK
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
9878760c
RK
33#include "insn-attr.h"
34#include "flags.h"
35#include "recog.h"
9878760c 36#include "obstack.h"
9b30bae2 37#include "tree.h"
dfafc897 38#include "expr.h"
2fc1c679 39#include "optabs.h"
2a430ec1 40#include "except.h"
a7df97e6 41#include "function.h"
296b8152 42#include "output.h"
d5fa86ba 43#include "basic-block.h"
d0101753 44#include "integrate.h"
296b8152 45#include "toplev.h"
c8023011 46#include "ggc.h"
9ebbca7d
GK
47#include "hashtab.h"
48#include "tm_p.h"
672a6f42
NB
49#include "target.h"
50#include "target-def.h"
3ac88239 51#include "langhooks.h"
24ea750e 52#include "reload.h"
117dca74 53#include "cfglayout.h"
79ae11c4 54#include "sched-int.h"
cd3ce9b4 55#include "tree-gimple.h"
e41b2a33 56#include "tree-flow.h"
4d3e6fae 57#include "intl.h"
59d6560b 58#include "params.h"
279bb624 59#include "tm-constrs.h"
1bc7c5b6
ZW
60#if TARGET_XCOFF
61#include "xcoffout.h" /* get declarations of xcoff_*_section_name */
62#endif
93a27b7b
ZW
63#if TARGET_MACHO
64#include "gstab.h" /* for N_SLINE */
65#endif
9b30bae2 66
7509c759
MM
67#ifndef TARGET_NO_PROTOTYPE
68#define TARGET_NO_PROTOTYPE 0
69#endif
70
9878760c
RK
71#define min(A,B) ((A) < (B) ? (A) : (B))
72#define max(A,B) ((A) > (B) ? (A) : (B))
73
d1d0c603
JJ
74/* Structure used to define the rs6000 stack */
75typedef struct rs6000_stack {
76 int first_gp_reg_save; /* first callee saved GP register used */
77 int first_fp_reg_save; /* first callee saved FP register used */
78 int first_altivec_reg_save; /* first callee saved AltiVec register used */
79 int lr_save_p; /* true if the link reg needs to be saved */
80 int cr_save_p; /* true if the CR reg needs to be saved */
81 unsigned int vrsave_mask; /* mask of vec registers to save */
d1d0c603
JJ
82 int push_p; /* true if we need to allocate stack space */
83 int calls_p; /* true if the function makes any calls */
c4ad648e 84 int world_save_p; /* true if we're saving *everything*:
d62294f5 85 r13-r31, cr, f14-f31, vrsave, v20-v31 */
d1d0c603
JJ
86 enum rs6000_abi abi; /* which ABI to use */
87 int gp_save_offset; /* offset to save GP regs from initial SP */
88 int fp_save_offset; /* offset to save FP regs from initial SP */
89 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
90 int lr_save_offset; /* offset to save LR from initial SP */
91 int cr_save_offset; /* offset to save CR from initial SP */
92 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
93 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
d1d0c603
JJ
94 int varargs_save_offset; /* offset to save the varargs registers */
95 int ehrd_offset; /* offset to EH return data */
96 int reg_size; /* register size (4 or 8) */
d1d0c603
JJ
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
d1d0c603
JJ
105 int vrsave_size; /* size to hold VRSAVE if not in save_size */
106 int altivec_padding_size; /* size of altivec alignment padding if
107 not in save_size */
108 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
109 int spe_padding_size;
d1d0c603
JJ
110 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
111 int spe_64bit_regs_used;
112} rs6000_stack_t;
113
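/* An illustrative sketch, not the actual logic: the size fields above
   are assumed to combine roughly as below when rs6000_stack_info ()
   (defined later in this file) computes total_size.  The 16-byte
   rounding is only an assumed ABI stack alignment.  */
#if 0
static HOST_WIDE_INT
total_frame_size_sketch (const rs6000_stack_t *info)
{
  HOST_WIDE_INT raw = info->fixed_size + info->vars_size
		      + info->parm_size + info->save_size;
  return (raw + 15) & ~(HOST_WIDE_INT) 15;
}
#endif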
5b667039
JJ
114/* A C structure for machine-specific, per-function data.
115 This is added to the cfun structure. */
116typedef struct machine_function GTY(())
117{
118 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
119 int ra_needs_full_frame;
120 /* Some local-dynamic symbol. */
121 const char *some_ld_name;
122 /* Whether the instruction chain has been scanned already. */
123 int insn_chain_scanned_p;
124 /* Flags if __builtin_return_address (0) was used. */
125 int ra_need_lr;
126 /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
127 varargs save area. */
128 HOST_WIDE_INT varargs_save_offset;
e41b2a33
PB
129 /* Temporary stack slot to use for SDmode copies. This slot is
130 64-bits wide and is allocated early enough so that the offset
131 does not overflow the 16-bit load/store offset field. */
132 rtx sdmode_stack_slot;
5b667039
JJ
133} machine_function;
134
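/* A hedged sketch of how this per-function record is assumed to be
   allocated via the init_machine_status hook; the real
   rs6000_init_machine_status appears later in the file.  */
#if 0
static struct machine_function *
init_machine_status_sketch (void)
{
  /* Return a zero-initialized, garbage-collected instance.  */
  return ggc_alloc_cleared (sizeof (machine_function));
}
#endif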
5248c961
RK
135/* Target cpu type */
136
137enum processor_type rs6000_cpu;
8e3f41e7
MM
138struct rs6000_cpu_select rs6000_select[3] =
139{
815cdc52
MM
140 /* switch name, tune arch */
141 { (const char *)0, "--with-cpu=", 1, 1 },
142 { (const char *)0, "-mcpu=", 1, 1 },
143 { (const char *)0, "-mtune=", 1, 0 },
8e3f41e7 144};
5248c961 145
d296e02e
AP
146static GTY(()) bool rs6000_cell_dont_microcode;
147
ec507f2d
DE
148/* Always emit branch hint bits. */
149static GTY(()) bool rs6000_always_hint;
150
151/* Schedule instructions for group formation. */
152static GTY(()) bool rs6000_sched_groups;
153
44cd321e
PS
154/* Align branch targets. */
155static GTY(()) bool rs6000_align_branch_targets;
156
569fa502
DN
157/* Support for -msched-costly-dep option. */
158const char *rs6000_sched_costly_dep_str;
159enum rs6000_dependence_cost rs6000_sched_costly_dep;
160
cbe26ab8
DN
161/* Support for -minsert-sched-nops option. */
162const char *rs6000_sched_insert_nops_str;
163enum rs6000_nop_insertion rs6000_sched_insert_nops;
164
7ccf35ed 165/* Support targetm.vectorize.builtin_mask_for_load. */
13c62176 166static GTY(()) tree altivec_builtin_mask_for_load;
7ccf35ed 167
602ea4d3 168/* Size of long double. */
6fa3f289
ZW
169int rs6000_long_double_type_size;
170
602ea4d3
JJ
171/* IEEE quad extended precision long double. */
172int rs6000_ieeequad;
173
a2db2771 174/* Nonzero to use AltiVec ABI. */
6fa3f289
ZW
175int rs6000_altivec_abi;
176
a3170dc6
AH
177/* Nonzero if we want SPE ABI extensions. */
178int rs6000_spe_abi;
179
5da702b1
AH
180/* Nonzero if floating point operations are done in the GPRs. */
181int rs6000_float_gprs = 0;
182
594a51fe
SS
183/* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
184int rs6000_darwin64_abi;
185
a0ab749a 186/* Set to nonzero once AIX common-mode calls have been defined. */
bbfb86aa 187static GTY(()) int common_mode_defined;
c81bebd7 188
9878760c
RK
189/* Save information from a "cmpxx" operation until the branch or scc is
190 emitted. */
9878760c
RK
191rtx rs6000_compare_op0, rs6000_compare_op1;
192int rs6000_compare_fp_p;
874a0744 193
874a0744
MM
 194/* Label number of the label created for -mrelocatable; we call it to
 195   get the address of the GOT section.  */
196int rs6000_pic_labelno;
c81bebd7 197
b91da81f 198#ifdef USING_ELFOS_H
c81bebd7 199/* Which ABI to adhere to.  */
9739c90c 200const char *rs6000_abi_name;
d9407988
MM
201
202/* Semantics of the small data area */
203enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
204
205/* Which small data model to use */
815cdc52 206const char *rs6000_sdata_name = (char *)0;
9ebbca7d
GK
207
208/* Counter for labels which are to be placed in .fixup. */
209int fixuplabelno = 0;
874a0744 210#endif
4697a36c 211
c4501e62
JJ
212/* Bit size of immediate TLS offsets and string from which it is decoded. */
213int rs6000_tls_size = 32;
214const char *rs6000_tls_size_string;
215
b6c9286a
MM
216/* ABI enumeration available for subtarget to use. */
217enum rs6000_abi rs6000_current_abi;
218
85b776df
AM
219/* Whether to use variant of AIX ABI for PowerPC64 Linux. */
220int dot_symbols;
221
38c1f2d7 222/* Debug flags */
815cdc52 223const char *rs6000_debug_name;
38c1f2d7
MM
224int rs6000_debug_stack; /* debug stack applications */
225int rs6000_debug_arg; /* debug argument handling */
226
aabcd309 227/* Value is TRUE if register/mode pair is acceptable. */
0d1fbc8c
AH
228bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
229
58646b77
PB
230/* Built in types. */
231
232tree rs6000_builtin_types[RS6000_BTI_MAX];
233tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
8bb418a3 234
57ac7be9
AM
235const char *rs6000_traceback_name;
236static enum {
237 traceback_default = 0,
238 traceback_none,
239 traceback_part,
240 traceback_full
241} rs6000_traceback;
242
38c1f2d7
MM
243/* Flag to say the TOC is initialized */
244int toc_initialized;
9ebbca7d 245char toc_label_name[10];
38c1f2d7 246
44cd321e
PS
247/* Cached value of rs6000_variable_issue. This is cached in
248 rs6000_variable_issue hook and returned from rs6000_sched_reorder2. */
249static short cached_can_issue_more;
250
d6b5193b
RS
251static GTY(()) section *read_only_data_section;
252static GTY(()) section *private_data_section;
253static GTY(()) section *read_only_private_data_section;
254static GTY(()) section *sdata2_section;
255static GTY(()) section *toc_section;
256
a3c9585f
KH
257/* Control alignment for fields within structures. */
258/* String from -malign-XXXXX. */
025d9908
KH
259int rs6000_alignment_flags;
260
78f5898b
AH
261/* True for any options that were explicitly set. */
262struct {
df01da37 263 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
78f5898b 264 bool alignment; /* True if -malign- was used. */
a2db2771
JJ
265 bool spe_abi; /* True if -mabi=spe/no-spe was used. */
266 bool altivec_abi; /* True if -mabi=altivec/no-altivec used. */
78f5898b
AH
267 bool spe; /* True if -mspe= was used. */
268 bool float_gprs; /* True if -mfloat-gprs= was used. */
269 bool isel; /* True if -misel was used. */
270 bool long_double; /* True if -mlong-double- was used. */
d3603e8c 271 bool ieee; /* True if -mabi=ieee/ibmlongdouble used. */
a2db2771 272 bool vrsave; /* True if -mvrsave was used. */
78f5898b
AH
273} rs6000_explicit_options;
274
a3170dc6
AH
275struct builtin_description
276{
277 /* mask is not const because we're going to alter it below. This
278 nonsense will go away when we rewrite the -march infrastructure
279 to give us more target flag bits. */
280 unsigned int mask;
281 const enum insn_code icode;
282 const char *const name;
283 const enum rs6000_builtins code;
284};
8b897cfa
RS
285\f
286/* Target cpu costs. */
287
288struct processor_costs {
c4ad648e 289 const int mulsi; /* cost of SImode multiplication. */
8b897cfa
RS
290 const int mulsi_const; /* cost of SImode multiplication by constant. */
291 const int mulsi_const9; /* cost of SImode mult by short constant. */
c4ad648e
AM
292 const int muldi; /* cost of DImode multiplication. */
293 const int divsi; /* cost of SImode division. */
294 const int divdi; /* cost of DImode division. */
295 const int fp; /* cost of simple SFmode and DFmode insns. */
296 const int dmul; /* cost of DFmode multiplication (and fmadd). */
297 const int sdiv; /* cost of SFmode division (fdivs). */
298 const int ddiv; /* cost of DFmode division (fdiv). */
5f732aba
DE
299 const int cache_line_size; /* cache line size in bytes. */
300 const int l1_cache_size; /* size of l1 cache, in kilobytes. */
301 const int l2_cache_size; /* size of l2 cache, in kilobytes. */
0b11da67
DE
302 const int simultaneous_prefetches; /* number of parallel prefetch
303 operations. */
8b897cfa
RS
304};
305
306const struct processor_costs *rs6000_cost;
307
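/* A hedged sketch of how rs6000_cost is assumed to be consulted (the
   real consumer is rs6000_rtx_costs, later in this file); the helper
   and its name are illustrative only.  */
#if 0
static int
mult_cost_sketch (enum machine_mode mode, bool const9)
{
  if (mode == DImode)
    return rs6000_cost->muldi;
  return const9 ? rs6000_cost->mulsi_const9 : rs6000_cost->mulsi;
}
#endif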
308/* Processor costs (relative to an add) */
309
310/* Instruction size costs on 32bit processors. */
311static const
312struct processor_costs size32_cost = {
06a67bdd
RS
313 COSTS_N_INSNS (1), /* mulsi */
314 COSTS_N_INSNS (1), /* mulsi_const */
315 COSTS_N_INSNS (1), /* mulsi_const9 */
316 COSTS_N_INSNS (1), /* muldi */
317 COSTS_N_INSNS (1), /* divsi */
318 COSTS_N_INSNS (1), /* divdi */
319 COSTS_N_INSNS (1), /* fp */
320 COSTS_N_INSNS (1), /* dmul */
321 COSTS_N_INSNS (1), /* sdiv */
322 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
323 32,
324 0,
325 0,
5f732aba 326 0,
8b897cfa
RS
327};
328
329/* Instruction size costs on 64bit processors. */
330static const
331struct processor_costs size64_cost = {
06a67bdd
RS
332 COSTS_N_INSNS (1), /* mulsi */
333 COSTS_N_INSNS (1), /* mulsi_const */
334 COSTS_N_INSNS (1), /* mulsi_const9 */
335 COSTS_N_INSNS (1), /* muldi */
336 COSTS_N_INSNS (1), /* divsi */
337 COSTS_N_INSNS (1), /* divdi */
338 COSTS_N_INSNS (1), /* fp */
339 COSTS_N_INSNS (1), /* dmul */
340 COSTS_N_INSNS (1), /* sdiv */
341 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
342 128,
343 0,
344 0,
5f732aba 345 0,
8b897cfa
RS
346};
347
348/* Instruction costs on RIOS1 processors. */
349static const
350struct processor_costs rios1_cost = {
06a67bdd
RS
351 COSTS_N_INSNS (5), /* mulsi */
352 COSTS_N_INSNS (4), /* mulsi_const */
353 COSTS_N_INSNS (3), /* mulsi_const9 */
354 COSTS_N_INSNS (5), /* muldi */
355 COSTS_N_INSNS (19), /* divsi */
356 COSTS_N_INSNS (19), /* divdi */
357 COSTS_N_INSNS (2), /* fp */
358 COSTS_N_INSNS (2), /* dmul */
359 COSTS_N_INSNS (19), /* sdiv */
360 COSTS_N_INSNS (19), /* ddiv */
5f732aba
DE
361 128,
362 64, /* l1 cache */
363 512, /* l2 cache */
0b11da67 364 0, /* streams */
8b897cfa
RS
365};
366
367/* Instruction costs on RIOS2 processors. */
368static const
369struct processor_costs rios2_cost = {
06a67bdd
RS
370 COSTS_N_INSNS (2), /* mulsi */
371 COSTS_N_INSNS (2), /* mulsi_const */
372 COSTS_N_INSNS (2), /* mulsi_const9 */
373 COSTS_N_INSNS (2), /* muldi */
374 COSTS_N_INSNS (13), /* divsi */
375 COSTS_N_INSNS (13), /* divdi */
376 COSTS_N_INSNS (2), /* fp */
377 COSTS_N_INSNS (2), /* dmul */
378 COSTS_N_INSNS (17), /* sdiv */
379 COSTS_N_INSNS (17), /* ddiv */
5f732aba
DE
380 256,
381 256, /* l1 cache */
382 1024, /* l2 cache */
0b11da67 383 0, /* streams */
8b897cfa
RS
384};
385
386/* Instruction costs on RS64A processors. */
387static const
388struct processor_costs rs64a_cost = {
06a67bdd
RS
389 COSTS_N_INSNS (20), /* mulsi */
390 COSTS_N_INSNS (12), /* mulsi_const */
391 COSTS_N_INSNS (8), /* mulsi_const9 */
392 COSTS_N_INSNS (34), /* muldi */
393 COSTS_N_INSNS (65), /* divsi */
394 COSTS_N_INSNS (67), /* divdi */
395 COSTS_N_INSNS (4), /* fp */
396 COSTS_N_INSNS (4), /* dmul */
397 COSTS_N_INSNS (31), /* sdiv */
398 COSTS_N_INSNS (31), /* ddiv */
0b11da67 399 128,
5f732aba
DE
400 128, /* l1 cache */
401 2048, /* l2 cache */
0b11da67 402 1, /* streams */
8b897cfa
RS
403};
404
405/* Instruction costs on MPCCORE processors. */
406static const
407struct processor_costs mpccore_cost = {
06a67bdd
RS
408 COSTS_N_INSNS (2), /* mulsi */
409 COSTS_N_INSNS (2), /* mulsi_const */
410 COSTS_N_INSNS (2), /* mulsi_const9 */
411 COSTS_N_INSNS (2), /* muldi */
412 COSTS_N_INSNS (6), /* divsi */
413 COSTS_N_INSNS (6), /* divdi */
414 COSTS_N_INSNS (4), /* fp */
415 COSTS_N_INSNS (5), /* dmul */
416 COSTS_N_INSNS (10), /* sdiv */
417 COSTS_N_INSNS (17), /* ddiv */
5f732aba
DE
418 32,
419 4, /* l1 cache */
420 16, /* l2 cache */
0b11da67 421 1, /* streams */
8b897cfa
RS
422};
423
424/* Instruction costs on PPC403 processors. */
425static const
426struct processor_costs ppc403_cost = {
06a67bdd
RS
427 COSTS_N_INSNS (4), /* mulsi */
428 COSTS_N_INSNS (4), /* mulsi_const */
429 COSTS_N_INSNS (4), /* mulsi_const9 */
430 COSTS_N_INSNS (4), /* muldi */
431 COSTS_N_INSNS (33), /* divsi */
432 COSTS_N_INSNS (33), /* divdi */
433 COSTS_N_INSNS (11), /* fp */
434 COSTS_N_INSNS (11), /* dmul */
435 COSTS_N_INSNS (11), /* sdiv */
436 COSTS_N_INSNS (11), /* ddiv */
0b11da67 437 32,
5f732aba
DE
438 4, /* l1 cache */
439 16, /* l2 cache */
0b11da67 440 1, /* streams */
8b897cfa
RS
441};
442
443/* Instruction costs on PPC405 processors. */
444static const
445struct processor_costs ppc405_cost = {
06a67bdd
RS
446 COSTS_N_INSNS (5), /* mulsi */
447 COSTS_N_INSNS (4), /* mulsi_const */
448 COSTS_N_INSNS (3), /* mulsi_const9 */
449 COSTS_N_INSNS (5), /* muldi */
450 COSTS_N_INSNS (35), /* divsi */
451 COSTS_N_INSNS (35), /* divdi */
452 COSTS_N_INSNS (11), /* fp */
453 COSTS_N_INSNS (11), /* dmul */
454 COSTS_N_INSNS (11), /* sdiv */
455 COSTS_N_INSNS (11), /* ddiv */
0b11da67 456 32,
5f732aba
DE
457 16, /* l1 cache */
458 128, /* l2 cache */
0b11da67 459 1, /* streams */
8b897cfa
RS
460};
461
462/* Instruction costs on PPC440 processors. */
463static const
464struct processor_costs ppc440_cost = {
06a67bdd
RS
465 COSTS_N_INSNS (3), /* mulsi */
466 COSTS_N_INSNS (2), /* mulsi_const */
467 COSTS_N_INSNS (2), /* mulsi_const9 */
468 COSTS_N_INSNS (3), /* muldi */
469 COSTS_N_INSNS (34), /* divsi */
470 COSTS_N_INSNS (34), /* divdi */
471 COSTS_N_INSNS (5), /* fp */
472 COSTS_N_INSNS (5), /* dmul */
473 COSTS_N_INSNS (19), /* sdiv */
474 COSTS_N_INSNS (33), /* ddiv */
0b11da67 475 32,
5f732aba
DE
476 32, /* l1 cache */
477 256, /* l2 cache */
0b11da67 478 1, /* streams */
8b897cfa
RS
479};
480
481/* Instruction costs on PPC601 processors. */
482static const
483struct processor_costs ppc601_cost = {
06a67bdd
RS
484 COSTS_N_INSNS (5), /* mulsi */
485 COSTS_N_INSNS (5), /* mulsi_const */
486 COSTS_N_INSNS (5), /* mulsi_const9 */
487 COSTS_N_INSNS (5), /* muldi */
488 COSTS_N_INSNS (36), /* divsi */
489 COSTS_N_INSNS (36), /* divdi */
490 COSTS_N_INSNS (4), /* fp */
491 COSTS_N_INSNS (5), /* dmul */
492 COSTS_N_INSNS (17), /* sdiv */
493 COSTS_N_INSNS (31), /* ddiv */
0b11da67 494 32,
5f732aba
DE
495 32, /* l1 cache */
496 256, /* l2 cache */
0b11da67 497 1, /* streams */
8b897cfa
RS
498};
499
500/* Instruction costs on PPC603 processors. */
501static const
502struct processor_costs ppc603_cost = {
06a67bdd
RS
503 COSTS_N_INSNS (5), /* mulsi */
504 COSTS_N_INSNS (3), /* mulsi_const */
505 COSTS_N_INSNS (2), /* mulsi_const9 */
506 COSTS_N_INSNS (5), /* muldi */
507 COSTS_N_INSNS (37), /* divsi */
508 COSTS_N_INSNS (37), /* divdi */
509 COSTS_N_INSNS (3), /* fp */
510 COSTS_N_INSNS (4), /* dmul */
511 COSTS_N_INSNS (18), /* sdiv */
512 COSTS_N_INSNS (33), /* ddiv */
0b11da67 513 32,
5f732aba
DE
514 8, /* l1 cache */
515 64, /* l2 cache */
0b11da67 516 1, /* streams */
8b897cfa
RS
517};
518
519/* Instruction costs on PPC604 processors. */
520static const
521struct processor_costs ppc604_cost = {
06a67bdd
RS
522 COSTS_N_INSNS (4), /* mulsi */
523 COSTS_N_INSNS (4), /* mulsi_const */
524 COSTS_N_INSNS (4), /* mulsi_const9 */
525 COSTS_N_INSNS (4), /* muldi */
526 COSTS_N_INSNS (20), /* divsi */
527 COSTS_N_INSNS (20), /* divdi */
528 COSTS_N_INSNS (3), /* fp */
529 COSTS_N_INSNS (3), /* dmul */
530 COSTS_N_INSNS (18), /* sdiv */
531 COSTS_N_INSNS (32), /* ddiv */
0b11da67 532 32,
5f732aba
DE
533 16, /* l1 cache */
534 512, /* l2 cache */
0b11da67 535 1, /* streams */
8b897cfa
RS
536};
537
538/* Instruction costs on PPC604e processors. */
539static const
540struct processor_costs ppc604e_cost = {
06a67bdd
RS
541 COSTS_N_INSNS (2), /* mulsi */
542 COSTS_N_INSNS (2), /* mulsi_const */
543 COSTS_N_INSNS (2), /* mulsi_const9 */
544 COSTS_N_INSNS (2), /* muldi */
545 COSTS_N_INSNS (20), /* divsi */
546 COSTS_N_INSNS (20), /* divdi */
547 COSTS_N_INSNS (3), /* fp */
548 COSTS_N_INSNS (3), /* dmul */
549 COSTS_N_INSNS (18), /* sdiv */
550 COSTS_N_INSNS (32), /* ddiv */
0b11da67 551 32,
5f732aba
DE
552 32, /* l1 cache */
553 1024, /* l2 cache */
0b11da67 554 1, /* streams */
8b897cfa
RS
555};
556
f0517163 557/* Instruction costs on PPC620 processors. */
8b897cfa
RS
558static const
559struct processor_costs ppc620_cost = {
06a67bdd
RS
560 COSTS_N_INSNS (5), /* mulsi */
561 COSTS_N_INSNS (4), /* mulsi_const */
562 COSTS_N_INSNS (3), /* mulsi_const9 */
563 COSTS_N_INSNS (7), /* muldi */
564 COSTS_N_INSNS (21), /* divsi */
565 COSTS_N_INSNS (37), /* divdi */
566 COSTS_N_INSNS (3), /* fp */
567 COSTS_N_INSNS (3), /* dmul */
568 COSTS_N_INSNS (18), /* sdiv */
569 COSTS_N_INSNS (32), /* ddiv */
0b11da67 570 128,
5f732aba
DE
571 32, /* l1 cache */
572 1024, /* l2 cache */
0b11da67 573 1, /* streams */
f0517163
RS
574};
575
576/* Instruction costs on PPC630 processors. */
577static const
578struct processor_costs ppc630_cost = {
06a67bdd
RS
579 COSTS_N_INSNS (5), /* mulsi */
580 COSTS_N_INSNS (4), /* mulsi_const */
581 COSTS_N_INSNS (3), /* mulsi_const9 */
582 COSTS_N_INSNS (7), /* muldi */
583 COSTS_N_INSNS (21), /* divsi */
584 COSTS_N_INSNS (37), /* divdi */
585 COSTS_N_INSNS (3), /* fp */
586 COSTS_N_INSNS (3), /* dmul */
587 COSTS_N_INSNS (17), /* sdiv */
588 COSTS_N_INSNS (21), /* ddiv */
0b11da67 589 128,
5f732aba
DE
590 64, /* l1 cache */
591 1024, /* l2 cache */
0b11da67 592 1, /* streams */
8b897cfa
RS
593};
594
d296e02e
AP
595/* Instruction costs on Cell processor. */
596/* COSTS_N_INSNS (1) ~ one add. */
597static const
598struct processor_costs ppccell_cost = {
599 COSTS_N_INSNS (9/2)+2, /* mulsi */
600 COSTS_N_INSNS (6/2), /* mulsi_const */
601 COSTS_N_INSNS (6/2), /* mulsi_const9 */
602 COSTS_N_INSNS (15/2)+2, /* muldi */
603 COSTS_N_INSNS (38/2), /* divsi */
604 COSTS_N_INSNS (70/2), /* divdi */
605 COSTS_N_INSNS (10/2), /* fp */
606 COSTS_N_INSNS (10/2), /* dmul */
607 COSTS_N_INSNS (74/2), /* sdiv */
608 COSTS_N_INSNS (74/2), /* ddiv */
0b11da67 609 128,
5f732aba
DE
610 32, /* l1 cache */
611 512, /* l2 cache */
612 6, /* streams */
d296e02e
AP
613};
614
8b897cfa
RS
615/* Instruction costs on PPC750 and PPC7400 processors. */
616static const
617struct processor_costs ppc750_cost = {
06a67bdd
RS
618 COSTS_N_INSNS (5), /* mulsi */
619 COSTS_N_INSNS (3), /* mulsi_const */
620 COSTS_N_INSNS (2), /* mulsi_const9 */
621 COSTS_N_INSNS (5), /* muldi */
622 COSTS_N_INSNS (17), /* divsi */
623 COSTS_N_INSNS (17), /* divdi */
624 COSTS_N_INSNS (3), /* fp */
625 COSTS_N_INSNS (3), /* dmul */
626 COSTS_N_INSNS (17), /* sdiv */
627 COSTS_N_INSNS (31), /* ddiv */
0b11da67 628 32,
5f732aba
DE
629 32, /* l1 cache */
630 512, /* l2 cache */
0b11da67 631 1, /* streams */
8b897cfa
RS
632};
633
634/* Instruction costs on PPC7450 processors. */
635static const
636struct processor_costs ppc7450_cost = {
06a67bdd
RS
637 COSTS_N_INSNS (4), /* mulsi */
638 COSTS_N_INSNS (3), /* mulsi_const */
639 COSTS_N_INSNS (3), /* mulsi_const9 */
640 COSTS_N_INSNS (4), /* muldi */
641 COSTS_N_INSNS (23), /* divsi */
642 COSTS_N_INSNS (23), /* divdi */
643 COSTS_N_INSNS (5), /* fp */
644 COSTS_N_INSNS (5), /* dmul */
645 COSTS_N_INSNS (21), /* sdiv */
646 COSTS_N_INSNS (35), /* ddiv */
0b11da67 647 32,
5f732aba
DE
648 32, /* l1 cache */
649 1024, /* l2 cache */
0b11da67 650 1, /* streams */
8b897cfa 651};
a3170dc6 652
8b897cfa
RS
653/* Instruction costs on PPC8540 processors. */
654static const
655struct processor_costs ppc8540_cost = {
06a67bdd
RS
656 COSTS_N_INSNS (4), /* mulsi */
657 COSTS_N_INSNS (4), /* mulsi_const */
658 COSTS_N_INSNS (4), /* mulsi_const9 */
659 COSTS_N_INSNS (4), /* muldi */
660 COSTS_N_INSNS (19), /* divsi */
661 COSTS_N_INSNS (19), /* divdi */
662 COSTS_N_INSNS (4), /* fp */
663 COSTS_N_INSNS (4), /* dmul */
664 COSTS_N_INSNS (29), /* sdiv */
665 COSTS_N_INSNS (29), /* ddiv */
0b11da67 666 32,
5f732aba
DE
667 32, /* l1 cache */
668 256, /* l2 cache */
0b11da67 669 1, /* prefetch streams */
8b897cfa
RS
670};
671
672/* Instruction costs on POWER4 and POWER5 processors. */
673static const
674struct processor_costs power4_cost = {
06a67bdd
RS
675 COSTS_N_INSNS (3), /* mulsi */
676 COSTS_N_INSNS (2), /* mulsi_const */
677 COSTS_N_INSNS (2), /* mulsi_const9 */
678 COSTS_N_INSNS (4), /* muldi */
679 COSTS_N_INSNS (18), /* divsi */
680 COSTS_N_INSNS (34), /* divdi */
681 COSTS_N_INSNS (3), /* fp */
682 COSTS_N_INSNS (3), /* dmul */
683 COSTS_N_INSNS (17), /* sdiv */
684 COSTS_N_INSNS (17), /* ddiv */
0b11da67 685 128,
5f732aba
DE
686 32, /* l1 cache */
687 1024, /* l2 cache */
0b11da67 688 8, /* prefetch streams */
8b897cfa
RS
689};
690
44cd321e
PS
691/* Instruction costs on POWER6 processors. */
692static const
693struct processor_costs power6_cost = {
694 COSTS_N_INSNS (8), /* mulsi */
695 COSTS_N_INSNS (8), /* mulsi_const */
696 COSTS_N_INSNS (8), /* mulsi_const9 */
697 COSTS_N_INSNS (8), /* muldi */
698 COSTS_N_INSNS (22), /* divsi */
699 COSTS_N_INSNS (28), /* divdi */
700 COSTS_N_INSNS (3), /* fp */
701 COSTS_N_INSNS (3), /* dmul */
702 COSTS_N_INSNS (13), /* sdiv */
703 COSTS_N_INSNS (16), /* ddiv */
0b11da67 704 128,
5f732aba
DE
705 64, /* l1 cache */
706 2048, /* l2 cache */
0b11da67 707 16, /* prefetch streams */
44cd321e
PS
708};
709
8b897cfa 710\f
a2369ed3 711static bool rs6000_function_ok_for_sibcall (tree, tree);
3101faab 712static const char *rs6000_invalid_within_doloop (const_rtx);
a2369ed3 713static rtx rs6000_generate_compare (enum rtx_code);
a2369ed3
DJ
714static void rs6000_emit_stack_tie (void);
715static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
716static rtx spe_synthesize_frame_save (rtx);
717static bool spe_func_has_64bit_regs_p (void);
b20a9cca 718static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
d1d0c603 719 int, HOST_WIDE_INT);
a2369ed3
DJ
720static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
721static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
722static unsigned rs6000_hash_constant (rtx);
723static unsigned toc_hash_function (const void *);
724static int toc_hash_eq (const void *, const void *);
725static int constant_pool_expr_1 (rtx, int *, int *);
726static bool constant_pool_expr_p (rtx);
d04b6e6e 727static bool legitimate_small_data_p (enum machine_mode, rtx);
a2369ed3
DJ
728static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
729static struct machine_function * rs6000_init_machine_status (void);
730static bool rs6000_assemble_integer (rtx, unsigned int, int);
6d0a8091 731static bool no_global_regs_above (int);
5add3202 732#ifdef HAVE_GAS_HIDDEN
a2369ed3 733static void rs6000_assemble_visibility (tree, int);
5add3202 734#endif
a2369ed3
DJ
735static int rs6000_ra_ever_killed (void);
736static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
8bb418a3 737static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
3101faab 738static bool rs6000_ms_bitfield_layout_p (const_tree);
77ccdfed 739static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
76d2b81d 740static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
3101faab 741static const char *rs6000_mangle_type (const_tree);
b86fe7b4 742extern const struct attribute_spec rs6000_attribute_table[];
a2369ed3 743static void rs6000_set_default_type_attributes (tree);
52ff33d0 744static bool rs6000_reg_live_or_pic_offset_p (int);
a2369ed3
DJ
745static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
746static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
b20a9cca
AM
747static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
748 tree);
a2369ed3 749static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
586de218 750static bool rs6000_return_in_memory (const_tree, const_tree);
a2369ed3 751static void rs6000_file_start (void);
7c262518 752#if TARGET_ELF
9b580a0b 753static int rs6000_elf_reloc_rw_mask (void);
a2369ed3
DJ
754static void rs6000_elf_asm_out_constructor (rtx, int);
755static void rs6000_elf_asm_out_destructor (rtx, int);
1334b570 756static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
d6b5193b 757static void rs6000_elf_asm_init_sections (void);
d6b5193b
RS
758static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
759 unsigned HOST_WIDE_INT);
a56d7372 760static void rs6000_elf_encode_section_info (tree, rtx, int)
0e5dbd9b 761 ATTRIBUTE_UNUSED;
7c262518 762#endif
3101faab 763static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
e41b2a33
PB
764static void rs6000_alloc_sdmode_stack_slot (void);
765static void rs6000_instantiate_decls (void);
cbaaba19 766#if TARGET_XCOFF
0d5817b2 767static void rs6000_xcoff_asm_output_anchor (rtx);
a2369ed3 768static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
d6b5193b 769static void rs6000_xcoff_asm_init_sections (void);
9b580a0b 770static int rs6000_xcoff_reloc_rw_mask (void);
8210e4c4 771static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
d6b5193b 772static section *rs6000_xcoff_select_section (tree, int,
b20a9cca 773 unsigned HOST_WIDE_INT);
d6b5193b
RS
774static void rs6000_xcoff_unique_section (tree, int);
775static section *rs6000_xcoff_select_rtx_section
776 (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
a2369ed3
DJ
777static const char * rs6000_xcoff_strip_name_encoding (const char *);
778static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
779static void rs6000_xcoff_file_start (void);
780static void rs6000_xcoff_file_end (void);
f1384257 781#endif
a2369ed3
DJ
782static int rs6000_variable_issue (FILE *, int, rtx, int);
783static bool rs6000_rtx_costs (rtx, int, int, int *);
784static int rs6000_adjust_cost (rtx, rtx, rtx, int);
44cd321e 785static void rs6000_sched_init (FILE *, int, int);
cbe26ab8 786static bool is_microcoded_insn (rtx);
d296e02e 787static bool is_nonpipeline_insn (rtx);
cbe26ab8
DN
788static bool is_cracked_insn (rtx);
789static bool is_branch_slot_insn (rtx);
44cd321e 790static bool is_load_insn (rtx);
e3a0e200 791static rtx get_store_dest (rtx pat);
44cd321e
PS
792static bool is_store_insn (rtx);
793static bool set_to_load_agen (rtx,rtx);
982afe02 794static bool adjacent_mem_locations (rtx,rtx);
a2369ed3
DJ
795static int rs6000_adjust_priority (rtx, int);
796static int rs6000_issue_rate (void);
b198261f 797static bool rs6000_is_costly_dependence (dep_t, int, int);
cbe26ab8
DN
798static rtx get_next_active_insn (rtx, rtx);
799static bool insn_terminates_group_p (rtx , enum group_termination);
44cd321e
PS
800static bool insn_must_be_first_in_group (rtx);
801static bool insn_must_be_last_in_group (rtx);
cbe26ab8
DN
802static bool is_costly_group (rtx *, rtx);
803static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
804static int redefine_groups (FILE *, int, rtx, rtx);
805static int pad_groups (FILE *, int, rtx, rtx);
806static void rs6000_sched_finish (FILE *, int);
44cd321e
PS
807static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
808static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
a2369ed3 809static int rs6000_use_sched_lookahead (void);
d296e02e 810static int rs6000_use_sched_lookahead_guard (rtx);
9c78b944 811static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
7ccf35ed 812static tree rs6000_builtin_mask_for_load (void);
89d67cca
DN
813static tree rs6000_builtin_mul_widen_even (tree);
814static tree rs6000_builtin_mul_widen_odd (tree);
f57d17f1 815static tree rs6000_builtin_conversion (enum tree_code, tree);
a2369ed3 816
58646b77 817static void def_builtin (int, const char *, tree, int);
3101faab 818static bool rs6000_vector_alignment_reachable (const_tree, bool);
a2369ed3
DJ
819static void rs6000_init_builtins (void);
820static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
821static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
822static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
823static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
824static void altivec_init_builtins (void);
825static void rs6000_common_init_builtins (void);
c15c90bb 826static void rs6000_init_libfuncs (void);
a2369ed3 827
96038623
DE
828static void paired_init_builtins (void);
829static rtx paired_expand_builtin (tree, rtx, bool *);
830static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
831static rtx paired_expand_stv_builtin (enum insn_code, tree);
832static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);
833
b20a9cca
AM
834static void enable_mask_for_builtins (struct builtin_description *, int,
835 enum rs6000_builtins,
836 enum rs6000_builtins);
7c62e993 837static tree build_opaque_vector_type (tree, int);
a2369ed3
DJ
838static void spe_init_builtins (void);
839static rtx spe_expand_builtin (tree, rtx, bool *);
61bea3b0 840static rtx spe_expand_stv_builtin (enum insn_code, tree);
a2369ed3
DJ
841static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
842static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
843static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
d1d0c603
JJ
844static rs6000_stack_t *rs6000_stack_info (void);
845static void debug_stack_info (rs6000_stack_t *);
a2369ed3
DJ
846
847static rtx altivec_expand_builtin (tree, rtx, bool *);
848static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
849static rtx altivec_expand_st_builtin (tree, rtx, bool *);
850static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
851static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
f676971a 852static rtx altivec_expand_predicate_builtin (enum insn_code,
c4ad648e 853 const char *, tree, rtx);
b4a62fa0 854static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
a2369ed3 855static rtx altivec_expand_stv_builtin (enum insn_code, tree);
7a4eca66
DE
856static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
857static rtx altivec_expand_vec_set_builtin (tree);
858static rtx altivec_expand_vec_ext_builtin (tree, rtx);
859static int get_element_number (tree, tree);
78f5898b 860static bool rs6000_handle_option (size_t, const char *, int);
a2369ed3 861static void rs6000_parse_tls_size_option (void);
5da702b1 862static void rs6000_parse_yes_no_option (const char *, const char *, int *);
a2369ed3
DJ
863static int first_altivec_reg_to_save (void);
864static unsigned int compute_vrsave_mask (void);
9390387d 865static void compute_save_world_info (rs6000_stack_t *info_ptr);
a2369ed3
DJ
866static void is_altivec_return_reg (rtx, void *);
867static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
868int easy_vector_constant (rtx, enum machine_mode);
3101faab 869static bool rs6000_is_opaque_type (const_tree);
a2369ed3 870static rtx rs6000_dwarf_register_span (rtx);
37ea0b7e 871static void rs6000_init_dwarf_reg_sizes_extra (tree);
a2369ed3 872static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
fdbe66f2 873static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
a2369ed3
DJ
874static rtx rs6000_tls_get_addr (void);
875static rtx rs6000_got_sym (void);
9390387d 876static int rs6000_tls_symbol_ref_1 (rtx *, void *);
a2369ed3
DJ
877static const char *rs6000_get_some_local_dynamic_name (void);
878static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
ded9bf77 879static rtx rs6000_complex_function_value (enum machine_mode);
b20a9cca 880static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
a2369ed3 881 enum machine_mode, tree);
0b5383eb
DJ
882static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
883 HOST_WIDE_INT);
884static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
885 tree, HOST_WIDE_INT);
886static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
887 HOST_WIDE_INT,
888 rtx[], int *);
889static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
586de218
KG
890 const_tree, HOST_WIDE_INT,
891 rtx[], int *);
892static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
ec6376ab 893static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
b1917422 894static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
c6e8c921
GK
895static void setup_incoming_varargs (CUMULATIVE_ARGS *,
896 enum machine_mode, tree,
897 int *, int);
8cd5a4e0 898static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
586de218 899 const_tree, bool);
78a52f11
RH
900static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
901 tree, bool);
3101faab 902static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
efdba735
SH
903#if TARGET_MACHO
904static void macho_branch_islands (void);
efdba735
SH
905static int no_previous_def (tree function_name);
906static tree get_prev_label (tree function_name);
c4e18b1c 907static void rs6000_darwin_file_start (void);
efdba735
SH
908#endif
909
c35d187f 910static tree rs6000_build_builtin_va_list (void);
d7bd8aeb 911static void rs6000_va_start (tree, rtx);
23a60a04 912static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
586de218 913static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
00b79d54 914static bool rs6000_scalar_mode_supported_p (enum machine_mode);
f676971a 915static bool rs6000_vector_mode_supported_p (enum machine_mode);
94ff898d 916static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
21213b4c 917 enum machine_mode);
94ff898d 918static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
21213b4c
DP
919 enum machine_mode);
920static int get_vsel_insn (enum machine_mode);
921static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
3aebbe5f 922static tree rs6000_stack_protect_fail (void);
21213b4c
DP
923
924const int INSN_NOT_AVAILABLE = -1;
93f90be6
FJ
925static enum machine_mode rs6000_eh_return_filter_mode (void);
926
17211ab5
GK
927/* Hash table stuff for keeping track of TOC entries. */
928
929struct toc_hash_struct GTY(())
930{
931 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
932 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
933 rtx key;
934 enum machine_mode key_mode;
935 int labelno;
936};
937
938static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
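/* A hedged sketch of how the table above is assumed to be probed when
   TOC entries are emitted (the real logic lives in output_toc, later
   in this file); the constants and helper name are illustrative.  */
#if 0
static int
toc_hash_lookup_sketch (rtx key, enum machine_mode mode, int labelno)
{
  struct toc_hash_struct tmp, *h;
  void **slot;

  if (toc_hash_table == NULL)
    toc_hash_table = htab_create_ggc (1021, toc_hash_function,
				      toc_hash_eq, NULL);
  tmp.key = key;
  tmp.key_mode = mode;
  tmp.labelno = labelno;
  slot = htab_find_slot (toc_hash_table, &tmp, INSERT);
  if (*slot == NULL)
    {
      h = (struct toc_hash_struct *) ggc_alloc (sizeof (*h));
      *h = tmp;
      *slot = h;
    }
  return ((struct toc_hash_struct *) *slot)->labelno;
}
#endif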
c81bebd7
MM
939\f
940/* Default register names. */
941char rs6000_reg_names[][8] =
942{
802a0058
MM
943 "0", "1", "2", "3", "4", "5", "6", "7",
944 "8", "9", "10", "11", "12", "13", "14", "15",
945 "16", "17", "18", "19", "20", "21", "22", "23",
946 "24", "25", "26", "27", "28", "29", "30", "31",
947 "0", "1", "2", "3", "4", "5", "6", "7",
948 "8", "9", "10", "11", "12", "13", "14", "15",
949 "16", "17", "18", "19", "20", "21", "22", "23",
950 "24", "25", "26", "27", "28", "29", "30", "31",
951 "mq", "lr", "ctr","ap",
952 "0", "1", "2", "3", "4", "5", "6", "7",
0ac081f6
AH
953 "xer",
954 /* AltiVec registers. */
0cd5e3a1
AH
955 "0", "1", "2", "3", "4", "5", "6", "7",
956 "8", "9", "10", "11", "12", "13", "14", "15",
957 "16", "17", "18", "19", "20", "21", "22", "23",
958 "24", "25", "26", "27", "28", "29", "30", "31",
59a4c851
AH
959 "vrsave", "vscr",
960 /* SPE registers. */
7d5175e1
JJ
961 "spe_acc", "spefscr",
962 /* Soft frame pointer. */
963 "sfp"
c81bebd7
MM
964};
965
966#ifdef TARGET_REGNAMES
8b60264b 967static const char alt_reg_names[][8] =
c81bebd7 968{
802a0058
MM
969 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
970 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
971 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
972 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
973 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
974 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
975 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
976 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
977 "mq", "lr", "ctr", "ap",
978 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
0ac081f6 979 "xer",
59a4c851 980 /* AltiVec registers. */
0ac081f6 981 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
59a4c851
AH
982 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
983 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
984 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
985 "vrsave", "vscr",
986 /* SPE registers. */
7d5175e1
JJ
987 "spe_acc", "spefscr",
988 /* Soft frame pointer. */
989 "sfp"
c81bebd7
MM
990};
991#endif
9878760c 992\f
daf11973
MM
993#ifndef MASK_STRICT_ALIGN
994#define MASK_STRICT_ALIGN 0
995#endif
ffcfcb5f
AM
996#ifndef TARGET_PROFILE_KERNEL
997#define TARGET_PROFILE_KERNEL 0
998#endif
3961e8fe
RH
999
1000/* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
1001#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
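/* For example, since %v0 maps to FIRST_ALTIVEC_REGNO, the mask bit for
   %v20 is ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 20)
   == 0x80000000 >> 20 == 0x00000800.  */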
672a6f42
NB
1002\f
1003/* Initialize the GCC target structure. */
91d231cb
JM
1004#undef TARGET_ATTRIBUTE_TABLE
1005#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
a5c76ee6
ZW
1006#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
1007#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
daf11973 1008
301d03af
RS
1009#undef TARGET_ASM_ALIGNED_DI_OP
1010#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
1011
1012/* Default unaligned ops are only provided for ELF. Find the ops needed
1013 for non-ELF systems. */
1014#ifndef OBJECT_FORMAT_ELF
cbaaba19 1015#if TARGET_XCOFF
ae6c1efd 1016/* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
301d03af
RS
1017 64-bit targets. */
1018#undef TARGET_ASM_UNALIGNED_HI_OP
1019#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
1020#undef TARGET_ASM_UNALIGNED_SI_OP
1021#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
1022#undef TARGET_ASM_UNALIGNED_DI_OP
1023#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
1024#else
1025/* For Darwin. */
1026#undef TARGET_ASM_UNALIGNED_HI_OP
1027#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
1028#undef TARGET_ASM_UNALIGNED_SI_OP
1029#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
49bd1d27
SS
1030#undef TARGET_ASM_UNALIGNED_DI_OP
1031#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
1032#undef TARGET_ASM_ALIGNED_DI_OP
1033#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
301d03af
RS
1034#endif
1035#endif
1036
1037/* This hook deals with fixups for relocatable code and DI-mode objects
1038 in 64-bit code. */
1039#undef TARGET_ASM_INTEGER
1040#define TARGET_ASM_INTEGER rs6000_assemble_integer
1041
93638d7a
AM
1042#ifdef HAVE_GAS_HIDDEN
1043#undef TARGET_ASM_ASSEMBLE_VISIBILITY
1044#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
1045#endif
1046
c4501e62
JJ
1047#undef TARGET_HAVE_TLS
1048#define TARGET_HAVE_TLS HAVE_AS_TLS
1049
1050#undef TARGET_CANNOT_FORCE_CONST_MEM
a7e0b075 1051#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
c4501e62 1052
08c148a8
NB
1053#undef TARGET_ASM_FUNCTION_PROLOGUE
1054#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
1055#undef TARGET_ASM_FUNCTION_EPILOGUE
1056#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
1057
b54cf83a
DE
1058#undef TARGET_SCHED_VARIABLE_ISSUE
1059#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
1060
c237e94a
ZW
1061#undef TARGET_SCHED_ISSUE_RATE
1062#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
1063#undef TARGET_SCHED_ADJUST_COST
1064#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
1065#undef TARGET_SCHED_ADJUST_PRIORITY
1066#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
f676971a 1067#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
569fa502 1068#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
44cd321e
PS
1069#undef TARGET_SCHED_INIT
1070#define TARGET_SCHED_INIT rs6000_sched_init
cbe26ab8
DN
1071#undef TARGET_SCHED_FINISH
1072#define TARGET_SCHED_FINISH rs6000_sched_finish
44cd321e
PS
1073#undef TARGET_SCHED_REORDER
1074#define TARGET_SCHED_REORDER rs6000_sched_reorder
1075#undef TARGET_SCHED_REORDER2
1076#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2
c237e94a 1077
be12c2b0
VM
1078#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
1079#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
1080
d296e02e
AP
1081#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
1082#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard
1083
7ccf35ed
DN
1084#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
1085#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
89d67cca
DN
1086#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
1087#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
1088#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
1089#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
f57d17f1
TM
1090#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
1091#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
7ccf35ed 1092
5b900a4c
DN
1093#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
1094#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable
1095
0ac081f6
AH
1096#undef TARGET_INIT_BUILTINS
1097#define TARGET_INIT_BUILTINS rs6000_init_builtins
1098
1099#undef TARGET_EXPAND_BUILTIN
1100#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
1101
608063c3
JB
1102#undef TARGET_MANGLE_TYPE
1103#define TARGET_MANGLE_TYPE rs6000_mangle_type
f18eca82 1104
c15c90bb
ZW
1105#undef TARGET_INIT_LIBFUNCS
1106#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
1107
f1384257 1108#if TARGET_MACHO
0e5dbd9b 1109#undef TARGET_BINDS_LOCAL_P
31920d83 1110#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
f1384257 1111#endif
0e5dbd9b 1112
77ccdfed
EC
1113#undef TARGET_MS_BITFIELD_LAYOUT_P
1114#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p
1115
3961e8fe
RH
1116#undef TARGET_ASM_OUTPUT_MI_THUNK
1117#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
1118
3961e8fe 1119#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3101faab 1120#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
00b960c7 1121
4977bab6
ZW
1122#undef TARGET_FUNCTION_OK_FOR_SIBCALL
1123#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
1124
2e3f0db6
DJ
1125#undef TARGET_INVALID_WITHIN_DOLOOP
1126#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
9419649c 1127
3c50106f
RH
1128#undef TARGET_RTX_COSTS
1129#define TARGET_RTX_COSTS rs6000_rtx_costs
dcefdf67
RH
1130#undef TARGET_ADDRESS_COST
1131#define TARGET_ADDRESS_COST hook_int_rtx_0
3c50106f 1132
c8e4f0e9 1133#undef TARGET_VECTOR_OPAQUE_P
58646b77 1134#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
62e1dfcf 1135
96714395
AH
1136#undef TARGET_DWARF_REGISTER_SPAN
1137#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
1138
37ea0b7e
JM
1139#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
1140#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra
1141
c6e8c921
GK
1142/* On rs6000, function arguments are promoted, as are function return
1143 values. */
1144#undef TARGET_PROMOTE_FUNCTION_ARGS
586de218 1145#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
c6e8c921 1146#undef TARGET_PROMOTE_FUNCTION_RETURN
586de218 1147#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
c6e8c921 1148
c6e8c921
GK
1149#undef TARGET_RETURN_IN_MEMORY
1150#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
1151
1152#undef TARGET_SETUP_INCOMING_VARARGS
1153#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
1154
1155/* Always strict argument naming on rs6000. */
1156#undef TARGET_STRICT_ARGUMENT_NAMING
1157#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
1158#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
1159#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
42ba5130 1160#undef TARGET_SPLIT_COMPLEX_ARG
3101faab 1161#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
fe984136
RH
1162#undef TARGET_MUST_PASS_IN_STACK
1163#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
8cd5a4e0
RH
1164#undef TARGET_PASS_BY_REFERENCE
1165#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
78a52f11
RH
1166#undef TARGET_ARG_PARTIAL_BYTES
1167#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
c6e8c921 1168
c35d187f
RH
1169#undef TARGET_BUILD_BUILTIN_VA_LIST
1170#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
1171
d7bd8aeb
JJ
1172#undef TARGET_EXPAND_BUILTIN_VA_START
1173#define TARGET_EXPAND_BUILTIN_VA_START rs6000_va_start
1174
cd3ce9b4
JM
1175#undef TARGET_GIMPLIFY_VA_ARG_EXPR
1176#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
1177
93f90be6
FJ
1178#undef TARGET_EH_RETURN_FILTER_MODE
1179#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
1180
00b79d54
BE
1181#undef TARGET_SCALAR_MODE_SUPPORTED_P
1182#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p
1183
f676971a
EC
1184#undef TARGET_VECTOR_MODE_SUPPORTED_P
1185#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
1186
4d3e6fae
FJ
1187#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
1188#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
1189
78f5898b
AH
1190#undef TARGET_HANDLE_OPTION
1191#define TARGET_HANDLE_OPTION rs6000_handle_option
1192
1193#undef TARGET_DEFAULT_TARGET_FLAGS
1194#define TARGET_DEFAULT_TARGET_FLAGS \
716019c0 1195 (TARGET_DEFAULT)
78f5898b 1196
3aebbe5f
JJ
1197#undef TARGET_STACK_PROTECT_FAIL
1198#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
1199
445cf5eb
JM
1200/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
1201 The PowerPC architecture requires only weak consistency among
1202 processors--that is, memory accesses between processors need not be
1203 sequentially consistent and memory accesses among processors can occur
1204 in any order. The ability to order memory accesses weakly provides
1205 opportunities for more efficient use of the system bus. Unless a
1206 dependency exists, the 604e allows read operations to precede store
1207 operations. */
1208#undef TARGET_RELAXED_ORDERING
1209#define TARGET_RELAXED_ORDERING true
1210
fdbe66f2
EB
1211#ifdef HAVE_AS_TLS
1212#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1213#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1214#endif
1215
aacd3885
RS
1216/* Use a 32-bit anchor range. This leads to sequences like:
1217
1218 addis tmp,anchor,high
1219 add dest,tmp,low
1220
1221 where tmp itself acts as an anchor, and can be shared between
1222 accesses to the same 64k page. */
1223#undef TARGET_MIN_ANCHOR_OFFSET
1224#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
1225#undef TARGET_MAX_ANCHOR_OFFSET
1226#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
1227#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1228#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p
1229
9c78b944
DE
1230#undef TARGET_BUILTIN_RECIPROCAL
1231#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal
1232
e41b2a33
PB
1233#undef TARGET_EXPAND_TO_RTL_HOOK
1234#define TARGET_EXPAND_TO_RTL_HOOK rs6000_alloc_sdmode_stack_slot
1235
1236#undef TARGET_INSTANTIATE_DECLS
1237#define TARGET_INSTANTIATE_DECLS rs6000_instantiate_decls
1238
f6897b10 1239struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 1240\f
0d1fbc8c
AH
1241
1242/* Value is 1 if hard register REGNO can hold a value of machine-mode
1243 MODE. */
1244static int
1245rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1246{
1247 /* The GPRs can hold any mode, but values bigger than one register
1248 cannot go past R31. */
1249 if (INT_REGNO_P (regno))
1250 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1251
a5a97921 1252 /* The float registers can only hold floating modes and DImode.
7393f7f8 1253 This excludes the 32-bit decimal float mode for now. */
0d1fbc8c
AH
1254 if (FP_REGNO_P (regno))
1255 return
96038623 1256 ((SCALAR_FLOAT_MODE_P (mode)
c092b045 1257 && (mode != TDmode || (regno % 2) == 0)
0d1fbc8c
AH
1258 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1259 || (GET_MODE_CLASS (mode) == MODE_INT
96038623
DE
1260 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
1261 || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
1262 && PAIRED_VECTOR_MODE (mode)));
0d1fbc8c
AH
1263
1264 /* The CR register can only hold CC modes. */
1265 if (CR_REGNO_P (regno))
1266 return GET_MODE_CLASS (mode) == MODE_CC;
1267
1268 if (XER_REGNO_P (regno))
1269 return mode == PSImode;
1270
 1271 /* AltiVec modes only in AltiVec registers. */
1272 if (ALTIVEC_REGNO_P (regno))
1273 return ALTIVEC_VECTOR_MODE (mode);
1274
1275 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1276 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1277 return 1;
1278
 1279 /* We cannot put TImode anywhere except the general registers, and it
 1280 must fit within the register set. */
1281
1282 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1283}
1284
1285/* Initialize rs6000_hard_regno_mode_ok_p table. */
1286static void
1287rs6000_init_hard_regno_mode_ok (void)
1288{
1289 int r, m;
1290
1291 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1292 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1293 if (rs6000_hard_regno_mode_ok (r, m))
1294 rs6000_hard_regno_mode_ok_p[m][r] = true;
1295}
1296
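/* The table built above is assumed to back the HARD_REGNO_MODE_OK
   macro in rs6000.h as a constant-time lookup, roughly along these
   lines (illustrative; the real definition lives in the header):  */
#if 0
#define HARD_REGNO_MODE_OK_SKETCH(REGNO, MODE) \
  rs6000_hard_regno_mode_ok_p[(int) (MODE)][(REGNO)]
#endif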
e4cad568
GK
1297#if TARGET_MACHO
1298/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1299
1300static void
1301darwin_rs6000_override_options (void)
1302{
 1303 /* The Darwin ABI always includes AltiVec; it can't be (validly) turned
 1304 off. */
1305 rs6000_altivec_abi = 1;
1306 TARGET_ALTIVEC_VRSAVE = 1;
1307 if (DEFAULT_ABI == ABI_DARWIN)
1308 {
1309 if (MACHO_DYNAMIC_NO_PIC_P)
1310 {
1311 if (flag_pic)
1312 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1313 flag_pic = 0;
1314 }
1315 else if (flag_pic == 1)
1316 {
1317 flag_pic = 2;
1318 }
1319 }
1320 if (TARGET_64BIT && ! TARGET_POWERPC64)
1321 {
1322 target_flags |= MASK_POWERPC64;
1323 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1324 }
1325 if (flag_mkernel)
1326 {
1327 rs6000_default_long_calls = 1;
1328 target_flags |= MASK_SOFT_FLOAT;
1329 }
1330
1331 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
 1332 AltiVec. */
1333 if (!flag_mkernel && !flag_apple_kext
1334 && TARGET_64BIT
1335 && ! (target_flags_explicit & MASK_ALTIVEC))
1336 target_flags |= MASK_ALTIVEC;
1337
 1338 /* Unless the user (not the configurer) has explicitly overridden
 1339 it with -mcpu=G3 or -mno-altivec, 10.5+ targets default to
 1340 G4 unless targeting the kernel. */
1341 if (!flag_mkernel
1342 && !flag_apple_kext
1343 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1344 && ! (target_flags_explicit & MASK_ALTIVEC)
1345 && ! rs6000_select[1].string)
1346 {
1347 target_flags |= MASK_ALTIVEC;
1348 }
1349}
1350#endif
1351
c1e55850
GK
1352/* If not otherwise specified by a target, make 'long double' equivalent to
1353 'double'. */
1354
1355#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1356#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1357#endif
1358
5248c961
RK
1359/* Override command line options. Mostly we process the processor
1360 type and sometimes adjust other TARGET_ options. */
1361
1362void
d779d0dc 1363rs6000_override_options (const char *default_cpu)
5248c961 1364{
c4d38ccb 1365 size_t i, j;
8e3f41e7 1366 struct rs6000_cpu_select *ptr;
66188a7e 1367 int set_masks;
5248c961 1368
66188a7e 1369 /* Simplifications for entries below. */
85638c0d 1370
66188a7e
GK
1371 enum {
1372 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1373 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1374 };
85638c0d 1375
66188a7e
GK
1376 /* This table occasionally claims that a processor does not support
1377 a particular feature even though it does, but the feature is slower
1378 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1379 complete description of the processor's support.
66188a7e
GK
1380
1381 Please keep this list in order, and don't forget to update the
1382 documentation in invoke.texi when adding a new processor or
1383 flag. */
5248c961
RK
1384 static struct ptt
1385 {
8b60264b
KG
1386 const char *const name; /* Canonical processor name. */
1387 const enum processor_type processor; /* Processor type enum value. */
1388 const int target_enable; /* Target flags to enable. */
8b60264b 1389 } const processor_target_table[]
66188a7e 1390 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1391 {"403", PROCESSOR_PPC403,
66188a7e 1392 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1393 {"405", PROCESSOR_PPC405,
716019c0
JM
1394 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1395 {"405fp", PROCESSOR_PPC405,
1396 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1397 {"440", PROCESSOR_PPC440,
716019c0
JM
1398 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1399 {"440fp", PROCESSOR_PPC440,
1400 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1401 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1402 {"601", PROCESSOR_PPC601,
66188a7e
GK
1403 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1404 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1405 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1406 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1407 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1408 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1409 {"620", PROCESSOR_PPC620,
1410 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1411 {"630", PROCESSOR_PPC630,
1412 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1413 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1414 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1415 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1416 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1417 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1418 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1419 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1420 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1421 /* 8548 has a dummy entry for now. */
a45bce6e 1422 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
66188a7e 1423 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1424 {"970", PROCESSOR_POWER4,
66188a7e 1425 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1426 {"cell", PROCESSOR_CELL,
1427 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1428 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1429 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1430 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1431 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1432 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1433 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1434 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1435 {"power2", PROCESSOR_POWER,
1436 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1437 {"power3", PROCESSOR_PPC630,
1438 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1439 {"power4", PROCESSOR_POWER4,
fc091c8e 1440 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
ec507f2d 1441 {"power5", PROCESSOR_POWER5,
432218ba
DE
1442 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1443 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7
DE
1444 {"power5+", PROCESSOR_POWER5,
1445 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1446 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1447 {"power6", PROCESSOR_POWER6,
e118597e 1448 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1449 | MASK_FPRND | MASK_CMPB | MASK_DFP },
44cd321e
PS
1450 {"power6x", PROCESSOR_POWER6,
1451 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1452 | MASK_FPRND | MASK_CMPB | MASK_MFPGPR | MASK_DFP },
66188a7e
GK
1453 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1454 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1455 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1456 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1457 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1458 {"rios2", PROCESSOR_RIOS2,
1459 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1460 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1461 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1462 {"rs64", PROCESSOR_RS64A,
1463 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1464 };
5248c961 1465
ca7558fc 1466 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1467
66188a7e
GK
1468 /* Some OSs don't support saving the high part of 64-bit registers on
1469 context switch. Other OSs don't support saving Altivec registers.
1470 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1471 settings; if the user wants either, the user must explicitly specify
1472 them and we won't interfere with the user's specification. */
1473
1474 enum {
1475 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1476 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1477 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1478 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1479 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1480 };
0d1fbc8c
AH
1481
1482 rs6000_init_hard_regno_mode_ok ();
1483
c4ad648e 1484 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1485#ifdef OS_MISSING_POWERPC64
1486 if (OS_MISSING_POWERPC64)
1487 set_masks &= ~MASK_POWERPC64;
1488#endif
1489#ifdef OS_MISSING_ALTIVEC
1490 if (OS_MISSING_ALTIVEC)
1491 set_masks &= ~MASK_ALTIVEC;
1492#endif
1493
768875a8
AM
1494 /* Don't let the processor default override flags given explicitly. */
1495 set_masks &= ~target_flags_explicit;
957211c3 1496
a4f6c312 1497 /* Identify the processor type. */
8e3f41e7 1498 rs6000_select[0].string = default_cpu;
3cb999d8 1499 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1500
b6a1cbae 1501 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1502 {
8e3f41e7
MM
1503 ptr = &rs6000_select[i];
1504 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1505 {
8e3f41e7
MM
1506 for (j = 0; j < ptt_size; j++)
1507 if (! strcmp (ptr->string, processor_target_table[j].name))
1508 {
1509 if (ptr->set_tune_p)
1510 rs6000_cpu = processor_target_table[j].processor;
1511
1512 if (ptr->set_arch_p)
1513 {
66188a7e
GK
1514 target_flags &= ~set_masks;
1515 target_flags |= (processor_target_table[j].target_enable
1516 & set_masks);
8e3f41e7
MM
1517 }
1518 break;
1519 }
1520
4406229e 1521 if (j == ptt_size)
8e3f41e7 1522 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1523 }
1524 }
8a61d227 1525
993f19a8 1526 if (TARGET_E500)
a3170dc6
AH
1527 rs6000_isel = 1;
1528
dff9f1b6
DE
1529 /* If we are optimizing big endian systems for space, use the load/store
1530 multiple and string instructions. */
ef792183 1531 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1532 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1533
a4f6c312
SS
1534 /* Don't allow -mmultiple or -mstring on little endian systems
1535 unless the cpu is a 750, because the hardware doesn't support the
1536 instructions used in little endian mode and they cause an alignment
1537 trap. The 750 does not cause an alignment trap (except when the
1538 target is unaligned). */
bef84347 1539
b21fb038 1540 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1541 {
1542 if (TARGET_MULTIPLE)
1543 {
1544 target_flags &= ~MASK_MULTIPLE;
b21fb038 1545 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1546 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1547 }
1548
1549 if (TARGET_STRING)
1550 {
1551 target_flags &= ~MASK_STRING;
b21fb038 1552 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1553 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1554 }
1555 }
3933e0e1 1556
38c1f2d7
MM
1557 /* Set debug flags */
1558 if (rs6000_debug_name)
1559 {
bfc79d3b 1560 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1561 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1562 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1563 rs6000_debug_stack = 1;
bfc79d3b 1564 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1565 rs6000_debug_arg = 1;
1566 else
c725bd79 1567 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1568 }
1569
57ac7be9
AM
1570 if (rs6000_traceback_name)
1571 {
1572 if (! strncmp (rs6000_traceback_name, "full", 4))
1573 rs6000_traceback = traceback_full;
1574 else if (! strncmp (rs6000_traceback_name, "part", 4))
1575 rs6000_traceback = traceback_part;
1576 else if (! strncmp (rs6000_traceback_name, "no", 2))
1577 rs6000_traceback = traceback_none;
1578 else
9e637a26 1579 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1580 rs6000_traceback_name);
1581 }
1582
78f5898b
AH
1583 if (!rs6000_explicit_options.long_double)
1584 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1585
602ea4d3 1586#ifndef POWERPC_LINUX
d3603e8c 1587 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1588 rs6000_ieeequad = 1;
1589#endif
1590
0db747be
DE
1591 /* Enable Altivec ABI for AIX -maltivec. */
1592 if (TARGET_XCOFF && TARGET_ALTIVEC)
1593 rs6000_altivec_abi = 1;
1594
a2db2771
JJ
1595 /* The AltiVec ABI is the default for PowerPC-64 GNU/Linux. For
1596 PowerPC-32 GNU/Linux, -maltivec implies the AltiVec ABI. It can
1597 be explicitly overridden in either case. */
1598 if (TARGET_ELF)
6d0ef01e 1599 {
a2db2771
JJ
1600 if (!rs6000_explicit_options.altivec_abi
1601 && (TARGET_64BIT || TARGET_ALTIVEC))
1602 rs6000_altivec_abi = 1;
1603
1604 /* Enable VRSAVE for AltiVec ABI, unless explicitly overridden. */
1605 if (!rs6000_explicit_options.vrsave)
1606 TARGET_ALTIVEC_VRSAVE = rs6000_altivec_abi;
6d0ef01e
HP
1607 }
1608
594a51fe
SS
1609 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1610 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1611 {
1612 rs6000_darwin64_abi = 1;
9c7956fd 1613#if TARGET_MACHO
6ac49599 1614 darwin_one_byte_bool = 1;
9c7956fd 1615#endif
d9168963
SS
1616 /* Default to natural alignment, for better performance. */
1617 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1618 }
1619
194c524a
DE
1620 /* Place FP constants in the constant pool instead of TOC
1621 if section anchors are enabled. */
1622 if (flag_section_anchors)
1623 TARGET_NO_FP_IN_TOC = 1;
1624
c4501e62
JJ
1625 /* Handle -mtls-size option. */
1626 rs6000_parse_tls_size_option ();
1627
a7ae18e2
AH
1628#ifdef SUBTARGET_OVERRIDE_OPTIONS
1629 SUBTARGET_OVERRIDE_OPTIONS;
1630#endif
1631#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1632 SUBSUBTARGET_OVERRIDE_OPTIONS;
1633#endif
4d4cbc0e
AH
1634#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1635 SUB3TARGET_OVERRIDE_OPTIONS;
1636#endif
a7ae18e2 1637
5da702b1
AH
1638 if (TARGET_E500)
1639 {
1640 /* The e500 does not have string instructions, and we set
1641 MASK_STRING above when optimizing for size. */
1642 if ((target_flags & MASK_STRING) != 0)
1643 target_flags = target_flags & ~MASK_STRING;
1644 }
1645 else if (rs6000_select[1].string != NULL)
1646 {
1647 /* For the powerpc-eabispe configuration, we set all these by
1648 default, so let's unset them if we manually set another
1649 CPU that is not the E500. */
a2db2771 1650 if (!rs6000_explicit_options.spe_abi)
5da702b1 1651 rs6000_spe_abi = 0;
78f5898b 1652 if (!rs6000_explicit_options.spe)
5da702b1 1653 rs6000_spe = 0;
78f5898b 1654 if (!rs6000_explicit_options.float_gprs)
5da702b1 1655 rs6000_float_gprs = 0;
78f5898b 1656 if (!rs6000_explicit_options.isel)
5da702b1
AH
1657 rs6000_isel = 0;
1658 }
b5044283 1659
eca0d5e8
JM
1660 /* Detect invalid option combinations with E500. */
1661 CHECK_E500_OPTIONS;
1662
ec507f2d 1663 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1664 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1665 && rs6000_cpu != PROCESSOR_POWER6
1666 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1667 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1668 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1669 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1670 || rs6000_cpu == PROCESSOR_POWER5
1671 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1672
ec507f2d
DE
1673 rs6000_sched_restricted_insns_priority
1674 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1675
569fa502 1676 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1677 rs6000_sched_costly_dep
1678 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1679
569fa502
DN
1680 if (rs6000_sched_costly_dep_str)
1681 {
f676971a 1682 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1683 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1684 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1685 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1686 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1687 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1688 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1689 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1690 else
c4ad648e 1691 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1692 }
1693
1694 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1695 rs6000_sched_insert_nops
1696 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1697
cbe26ab8
DN
1698 if (rs6000_sched_insert_nops_str)
1699 {
1700 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1701 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1702 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1703 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1704 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1705 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1706 else
c4ad648e 1707 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1708 }
1709
c81bebd7 1710#ifdef TARGET_REGNAMES
a4f6c312
SS
1711 /* If the user desires alternate register names, copy in the
1712 alternate names now. */
c81bebd7 1713 if (TARGET_REGNAMES)
4e135bdd 1714 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1715#endif
1716
df01da37 1717 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1718 If -maix-struct-return or -msvr4-struct-return was explicitly
1719 used, don't override with the ABI default. */
df01da37
DE
1720 if (!rs6000_explicit_options.aix_struct_ret)
1721 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1722
602ea4d3 1723 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1724 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1725
f676971a 1726 if (TARGET_TOC)
9ebbca7d 1727 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1728
301d03af
RS
1729 /* We can only guarantee the availability of DI pseudo-ops when
1730 assembling for 64-bit targets. */
ae6c1efd 1731 if (!TARGET_64BIT)
301d03af
RS
1732 {
1733 targetm.asm_out.aligned_op.di = NULL;
1734 targetm.asm_out.unaligned_op.di = NULL;
1735 }
1736
1494c534
DE
1737 /* Set branch target alignment, if not optimizing for size. */
1738 if (!optimize_size)
1739 {
d296e02e
AP
1740 /* Cell wants to be aligned to 8 bytes for dual issue. */
1741 if (rs6000_cpu == PROCESSOR_CELL)
1742 {
1743 if (align_functions <= 0)
1744 align_functions = 8;
1745 if (align_jumps <= 0)
1746 align_jumps = 8;
1747 if (align_loops <= 0)
1748 align_loops = 8;
1749 }
44cd321e 1750 if (rs6000_align_branch_targets)
1494c534
DE
1751 {
1752 if (align_functions <= 0)
1753 align_functions = 16;
1754 if (align_jumps <= 0)
1755 align_jumps = 16;
1756 if (align_loops <= 0)
1757 align_loops = 16;
1758 }
1759 if (align_jumps_max_skip <= 0)
1760 align_jumps_max_skip = 15;
1761 if (align_loops_max_skip <= 0)
1762 align_loops_max_skip = 15;
1763 }
2792d578 1764
71f123ca
FS
1765 /* Arrange to save and restore machine status around nested functions. */
1766 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1767
1768 /* We should always be splitting complex arguments, but we can't break
1769 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1770 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1771 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1772
1773 /* Initialize rs6000_cost with the appropriate target costs. */
1774 if (optimize_size)
1775 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1776 else
1777 switch (rs6000_cpu)
1778 {
1779 case PROCESSOR_RIOS1:
1780 rs6000_cost = &rios1_cost;
1781 break;
1782
1783 case PROCESSOR_RIOS2:
1784 rs6000_cost = &rios2_cost;
1785 break;
1786
1787 case PROCESSOR_RS64A:
1788 rs6000_cost = &rs64a_cost;
1789 break;
1790
1791 case PROCESSOR_MPCCORE:
1792 rs6000_cost = &mpccore_cost;
1793 break;
1794
1795 case PROCESSOR_PPC403:
1796 rs6000_cost = &ppc403_cost;
1797 break;
1798
1799 case PROCESSOR_PPC405:
1800 rs6000_cost = &ppc405_cost;
1801 break;
1802
1803 case PROCESSOR_PPC440:
1804 rs6000_cost = &ppc440_cost;
1805 break;
1806
1807 case PROCESSOR_PPC601:
1808 rs6000_cost = &ppc601_cost;
1809 break;
1810
1811 case PROCESSOR_PPC603:
1812 rs6000_cost = &ppc603_cost;
1813 break;
1814
1815 case PROCESSOR_PPC604:
1816 rs6000_cost = &ppc604_cost;
1817 break;
1818
1819 case PROCESSOR_PPC604e:
1820 rs6000_cost = &ppc604e_cost;
1821 break;
1822
1823 case PROCESSOR_PPC620:
8b897cfa
RS
1824 rs6000_cost = &ppc620_cost;
1825 break;
1826
f0517163
RS
1827 case PROCESSOR_PPC630:
1828 rs6000_cost = &ppc630_cost;
1829 break;
1830
982afe02 1831 case PROCESSOR_CELL:
d296e02e
AP
1832 rs6000_cost = &ppccell_cost;
1833 break;
1834
8b897cfa
RS
1835 case PROCESSOR_PPC750:
1836 case PROCESSOR_PPC7400:
1837 rs6000_cost = &ppc750_cost;
1838 break;
1839
1840 case PROCESSOR_PPC7450:
1841 rs6000_cost = &ppc7450_cost;
1842 break;
1843
1844 case PROCESSOR_PPC8540:
1845 rs6000_cost = &ppc8540_cost;
1846 break;
1847
1848 case PROCESSOR_POWER4:
1849 case PROCESSOR_POWER5:
1850 rs6000_cost = &power4_cost;
1851 break;
1852
44cd321e
PS
1853 case PROCESSOR_POWER6:
1854 rs6000_cost = &power6_cost;
1855 break;
1856
8b897cfa 1857 default:
37409796 1858 gcc_unreachable ();
8b897cfa 1859 }
0b11da67
DE
1860
1861 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1862 set_param_value ("simultaneous-prefetches",
1863 rs6000_cost->simultaneous_prefetches);
1864 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1865 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1866 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1867 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1868 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1869 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
d7bd8aeb
JJ
1870
1871 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
1872 can be optimized to ap = __builtin_next_arg (0). */
1873 if (DEFAULT_ABI != ABI_V4)
1874 targetm.expand_builtin_va_start = NULL;
5248c961 1875}
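/* Worked example (editorial sketch based on the table above): with
   "-mcpu=power5" and no other -m options, the rs6000_select loop picks
   the "power5" entry, so rs6000_cpu becomes PROCESSOR_POWER5 and the
   bits of target_flags covered by set_masks are replaced by
   POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT | MASK_MFCRF
   | MASK_POPCNTB.  Any mask the user supplied explicitly has already
   been removed from set_masks via target_flags_explicit and is
   therefore preserved.  */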
5accd822 1876
7ccf35ed
DN
1877/* Implement targetm.vectorize.builtin_mask_for_load. */
1878static tree
1879rs6000_builtin_mask_for_load (void)
1880{
1881 if (TARGET_ALTIVEC)
1882 return altivec_builtin_mask_for_load;
1883 else
1884 return 0;
1885}
1886
f57d17f1
TM
1887/* Implement targetm.vectorize.builtin_conversion. */
1888static tree
1889rs6000_builtin_conversion (enum tree_code code, tree type)
1890{
1891 if (!TARGET_ALTIVEC)
1892 return NULL_TREE;
982afe02 1893
f57d17f1
TM
1894 switch (code)
1895 {
1896 case FLOAT_EXPR:
1897 switch (TYPE_MODE (type))
1898 {
1899 case V4SImode:
982afe02 1900 return TYPE_UNSIGNED (type) ?
f57d17f1
TM
1901 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX] :
1902 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
1903 default:
1904 return NULL_TREE;
1905 }
1906 default:
1907 return NULL_TREE;
1908 }
1909}
1910
89d67cca
DN
1911/* Implement targetm.vectorize.builtin_mul_widen_even. */
1912static tree
1913rs6000_builtin_mul_widen_even (tree type)
1914{
1915 if (!TARGET_ALTIVEC)
1916 return NULL_TREE;
1917
1918 switch (TYPE_MODE (type))
1919 {
1920 case V8HImode:
982afe02 1921 return TYPE_UNSIGNED (type) ?
89d67cca
DN
1922 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH] :
1923 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
1924
1925 case V16QImode:
1926 return TYPE_UNSIGNED (type) ?
1927 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB] :
1928 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
1929 default:
1930 return NULL_TREE;
1931 }
1932}
1933
1934/* Implement targetm.vectorize.builtin_mul_widen_odd. */
1935static tree
1936rs6000_builtin_mul_widen_odd (tree type)
1937{
1938 if (!TARGET_ALTIVEC)
1939 return NULL_TREE;
1940
1941 switch (TYPE_MODE (type))
1942 {
1943 case V8HImode:
1944 return TYPE_UNSIGNED (type) ?
1945 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH] :
1946 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
1947
1948 case V16QImode:
1949 return TYPE_UNSIGNED (type) ?
1950 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB] :
1951 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
1952 default:
1953 return NULL_TREE;
1954 }
1955}
1956
5b900a4c
DN
1957
1958/* Return true iff a data reference of TYPE can reach vector alignment (16)
1959 after applying N iterations. This routine does not determine
1960 how many iterations are required to reach the desired alignment. */
1961
1962static bool
3101faab 1963rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
1964{
1965 if (is_packed)
1966 return false;
1967
1968 if (TARGET_32BIT)
1969 {
1970 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
1971 return true;
1972
1973 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
1974 return true;
1975
1976 return false;
1977 }
1978 else
1979 {
1980 if (TARGET_MACHO)
1981 return false;
1982
1983 /* Assume that all other types are naturally aligned. CHECKME! */
1984 return true;
1985 }
1986}
1987
5da702b1
AH
1988/* Handle generic options of the form -mfoo=yes/no.
1989 NAME is the option name.
1990 VALUE is the option value.
1991 FLAG points to the flag in which to store 1 or 0, depending on
1992 whether the option value is 'yes' or 'no' respectively. */
993f19a8 1993static void
5da702b1 1994rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 1995{
5da702b1 1996 if (value == 0)
993f19a8 1997 return;
5da702b1
AH
1998 else if (!strcmp (value, "yes"))
1999 *flag = 1;
2000 else if (!strcmp (value, "no"))
2001 *flag = 0;
08b57fb3 2002 else
5da702b1 2003 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
2004}
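/* Usage sketch: the option handlers below invoke this helper as, for
   example,
     rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
   so "-misel=yes" stores 1, "-misel=no" stores 0, and any other value
   reports "unknown -misel= option specified".  */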
2005
c4501e62
JJ
2006/* Validate and record the size specified with the -mtls-size option. */
2007
2008static void
863d938c 2009rs6000_parse_tls_size_option (void)
c4501e62
JJ
2010{
2011 if (rs6000_tls_size_string == 0)
2012 return;
2013 else if (strcmp (rs6000_tls_size_string, "16") == 0)
2014 rs6000_tls_size = 16;
2015 else if (strcmp (rs6000_tls_size_string, "32") == 0)
2016 rs6000_tls_size = 32;
2017 else if (strcmp (rs6000_tls_size_string, "64") == 0)
2018 rs6000_tls_size = 64;
2019 else
9e637a26 2020 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
2021}
2022
5accd822 2023void
a2369ed3 2024optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 2025{
2e3f0db6
DJ
2026 if (DEFAULT_ABI == ABI_DARWIN)
2027 /* The Darwin libraries never set errno, so we might as well
2028 avoid calling them when that's the only reason we would. */
2029 flag_errno_math = 0;
59d6560b
DE
2030
2031 /* Double growth factor to counter reduced min jump length. */
2032 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
2033
2034 /* Enable section anchors by default.
2035 Skip section anchors for Objective C and Objective C++
2036 until the front ends are fixed. */
23f99493 2037 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
194c524a 2038 flag_section_anchors = 1;
5accd822 2039}
78f5898b
AH
2040
2041/* Implement TARGET_HANDLE_OPTION. */
2042
2043static bool
2044rs6000_handle_option (size_t code, const char *arg, int value)
2045{
2046 switch (code)
2047 {
2048 case OPT_mno_power:
2049 target_flags &= ~(MASK_POWER | MASK_POWER2
2050 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2051 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2052 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2053 break;
2054 case OPT_mno_powerpc:
2055 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2056 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2057 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2058 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2059 break;
2060 case OPT_mfull_toc:
d2894ab5
DE
2061 target_flags &= ~MASK_MINIMAL_TOC;
2062 TARGET_NO_FP_IN_TOC = 0;
2063 TARGET_NO_SUM_IN_TOC = 0;
2064 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2065#ifdef TARGET_USES_SYSV4_OPT
2066 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
2067 just the same as -mminimal-toc. */
2068 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2069 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2070#endif
2071 break;
2072
2073#ifdef TARGET_USES_SYSV4_OPT
2074 case OPT_mtoc:
2075 /* Make -mtoc behave like -mminimal-toc. */
2076 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2077 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2078 break;
2079#endif
2080
2081#ifdef TARGET_USES_AIX64_OPT
2082 case OPT_maix64:
2083#else
2084 case OPT_m64:
2085#endif
2c9c9afd
AM
2086 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2087 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2088 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2089 break;
2090
2091#ifdef TARGET_USES_AIX64_OPT
2092 case OPT_maix32:
2093#else
2094 case OPT_m32:
2095#endif
2096 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2097 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2098 break;
2099
2100 case OPT_minsert_sched_nops_:
2101 rs6000_sched_insert_nops_str = arg;
2102 break;
2103
2104 case OPT_mminimal_toc:
2105 if (value == 1)
2106 {
d2894ab5
DE
2107 TARGET_NO_FP_IN_TOC = 0;
2108 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2109 }
2110 break;
2111
2112 case OPT_mpower:
2113 if (value == 1)
c2dba4ab
AH
2114 {
2115 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2116 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2117 }
78f5898b
AH
2118 break;
2119
2120 case OPT_mpower2:
2121 if (value == 1)
c2dba4ab
AH
2122 {
2123 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2124 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2125 }
78f5898b
AH
2126 break;
2127
2128 case OPT_mpowerpc_gpopt:
2129 case OPT_mpowerpc_gfxopt:
2130 if (value == 1)
c2dba4ab
AH
2131 {
2132 target_flags |= MASK_POWERPC;
2133 target_flags_explicit |= MASK_POWERPC;
2134 }
78f5898b
AH
2135 break;
2136
df01da37
DE
2137 case OPT_maix_struct_return:
2138 case OPT_msvr4_struct_return:
2139 rs6000_explicit_options.aix_struct_ret = true;
2140 break;
2141
78f5898b 2142 case OPT_mvrsave_:
a2db2771 2143 rs6000_explicit_options.vrsave = true;
78f5898b
AH
2144 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2145 break;
78f5898b
AH
2146
2147 case OPT_misel_:
2148 rs6000_explicit_options.isel = true;
2149 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2150 break;
2151
2152 case OPT_mspe_:
2153 rs6000_explicit_options.spe = true;
2154 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2155 break;
2156
2157 case OPT_mdebug_:
2158 rs6000_debug_name = arg;
2159 break;
2160
2161#ifdef TARGET_USES_SYSV4_OPT
2162 case OPT_mcall_:
2163 rs6000_abi_name = arg;
2164 break;
2165
2166 case OPT_msdata_:
2167 rs6000_sdata_name = arg;
2168 break;
2169
2170 case OPT_mtls_size_:
2171 rs6000_tls_size_string = arg;
2172 break;
2173
2174 case OPT_mrelocatable:
2175 if (value == 1)
c2dba4ab 2176 {
e0bf274f
AM
2177 target_flags |= MASK_MINIMAL_TOC;
2178 target_flags_explicit |= MASK_MINIMAL_TOC;
2179 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2180 }
78f5898b
AH
2181 break;
2182
2183 case OPT_mrelocatable_lib:
2184 if (value == 1)
c2dba4ab 2185 {
e0bf274f
AM
2186 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2187 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2188 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2189 }
78f5898b 2190 else
c2dba4ab
AH
2191 {
2192 target_flags &= ~MASK_RELOCATABLE;
2193 target_flags_explicit |= MASK_RELOCATABLE;
2194 }
78f5898b
AH
2195 break;
2196#endif
2197
2198 case OPT_mabi_:
78f5898b
AH
2199 if (!strcmp (arg, "altivec"))
2200 {
a2db2771 2201 rs6000_explicit_options.altivec_abi = true;
78f5898b 2202 rs6000_altivec_abi = 1;
a2db2771
JJ
2203
2204 /* Enabling the AltiVec ABI turns off the SPE ABI. */
78f5898b
AH
2205 rs6000_spe_abi = 0;
2206 }
2207 else if (! strcmp (arg, "no-altivec"))
d3603e8c 2208 {
a2db2771 2209 rs6000_explicit_options.altivec_abi = true;
d3603e8c
AM
2210 rs6000_altivec_abi = 0;
2211 }
78f5898b
AH
2212 else if (! strcmp (arg, "spe"))
2213 {
a2db2771 2214 rs6000_explicit_options.spe_abi = true;
78f5898b
AH
2215 rs6000_spe_abi = 1;
2216 rs6000_altivec_abi = 0;
2217 if (!TARGET_SPE_ABI)
2218 error ("not configured for ABI: '%s'", arg);
2219 }
2220 else if (! strcmp (arg, "no-spe"))
d3603e8c 2221 {
a2db2771 2222 rs6000_explicit_options.spe_abi = true;
d3603e8c
AM
2223 rs6000_spe_abi = 0;
2224 }
78f5898b
AH
2225
2226 /* These are here for testing during development only, do not
2227 document in the manual please. */
2228 else if (! strcmp (arg, "d64"))
2229 {
2230 rs6000_darwin64_abi = 1;
2231 warning (0, "Using darwin64 ABI");
2232 }
2233 else if (! strcmp (arg, "d32"))
2234 {
2235 rs6000_darwin64_abi = 0;
2236 warning (0, "Using old darwin ABI");
2237 }
2238
602ea4d3
JJ
2239 else if (! strcmp (arg, "ibmlongdouble"))
2240 {
d3603e8c 2241 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2242 rs6000_ieeequad = 0;
2243 warning (0, "Using IBM extended precision long double");
2244 }
2245 else if (! strcmp (arg, "ieeelongdouble"))
2246 {
d3603e8c 2247 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2248 rs6000_ieeequad = 1;
2249 warning (0, "Using IEEE extended precision long double");
2250 }
2251
78f5898b
AH
2252 else
2253 {
2254 error ("unknown ABI specified: '%s'", arg);
2255 return false;
2256 }
2257 break;
2258
2259 case OPT_mcpu_:
2260 rs6000_select[1].string = arg;
2261 break;
2262
2263 case OPT_mtune_:
2264 rs6000_select[2].string = arg;
2265 break;
2266
2267 case OPT_mtraceback_:
2268 rs6000_traceback_name = arg;
2269 break;
2270
2271 case OPT_mfloat_gprs_:
2272 rs6000_explicit_options.float_gprs = true;
2273 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2274 rs6000_float_gprs = 1;
2275 else if (! strcmp (arg, "double"))
2276 rs6000_float_gprs = 2;
2277 else if (! strcmp (arg, "no"))
2278 rs6000_float_gprs = 0;
2279 else
2280 {
2281 error ("invalid option for -mfloat-gprs: '%s'", arg);
2282 return false;
2283 }
2284 break;
2285
2286 case OPT_mlong_double_:
2287 rs6000_explicit_options.long_double = true;
2288 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2289 if (value != 64 && value != 128)
2290 {
2291 error ("Unknown switch -mlong-double-%s", arg);
2292 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2293 return false;
2294 }
2295 else
2296 rs6000_long_double_type_size = value;
2297 break;
2298
2299 case OPT_msched_costly_dep_:
2300 rs6000_sched_costly_dep_str = arg;
2301 break;
2302
2303 case OPT_malign_:
2304 rs6000_explicit_options.alignment = true;
2305 if (! strcmp (arg, "power"))
2306 {
2307 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2308 some C library functions, so warn about it. The flag may be
2309 useful for performance studies from time to time though, so
2310 don't disable it entirely. */
2311 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2312 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2313 " it is incompatible with the installed C and C++ libraries");
2314 rs6000_alignment_flags = MASK_ALIGN_POWER;
2315 }
2316 else if (! strcmp (arg, "natural"))
2317 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2318 else
2319 {
2320 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2321 return false;
2322 }
2323 break;
2324 }
2325 return true;
2326}
3cfa4909
MM
2327\f
2328/* Do anything needed at the start of the asm file. */
2329
1bc7c5b6 2330static void
863d938c 2331rs6000_file_start (void)
3cfa4909 2332{
c4d38ccb 2333 size_t i;
3cfa4909 2334 char buffer[80];
d330fd93 2335 const char *start = buffer;
3cfa4909 2336 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2337 const char *default_cpu = TARGET_CPU_DEFAULT;
2338 FILE *file = asm_out_file;
2339
2340 default_file_start ();
2341
2342#ifdef TARGET_BI_ARCH
2343 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2344 default_cpu = 0;
2345#endif
3cfa4909
MM
2346
2347 if (flag_verbose_asm)
2348 {
2349 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2350 rs6000_select[0].string = default_cpu;
2351
b6a1cbae 2352 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2353 {
2354 ptr = &rs6000_select[i];
2355 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2356 {
2357 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2358 start = "";
2359 }
2360 }
2361
9c6b4ed9 2362 if (PPC405_ERRATUM77)
b0bfee6e 2363 {
9c6b4ed9 2364 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2365 start = "";
2366 }
b0bfee6e 2367
b91da81f 2368#ifdef USING_ELFOS_H
3cfa4909
MM
2369 switch (rs6000_sdata)
2370 {
2371 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2372 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2373 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2374 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2375 }
2376
2377 if (rs6000_sdata && g_switch_value)
2378 {
307b599c
MK
2379 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2380 g_switch_value);
3cfa4909
MM
2381 start = "";
2382 }
2383#endif
2384
2385 if (*start == '\0')
949ea356 2386 putc ('\n', file);
3cfa4909 2387 }
b723e82f 2388
e51917ae
JM
2389#ifdef HAVE_AS_GNU_ATTRIBUTE
2390 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2391 {
2392 fprintf (file, "\t.gnu_attribute 4, %d\n",
2393 (TARGET_HARD_FLOAT && TARGET_FPRS) ? 1 : 2);
2394 fprintf (file, "\t.gnu_attribute 8, %d\n",
2395 (TARGET_ALTIVEC_ABI ? 2
2396 : TARGET_SPE_ABI ? 3
2397 : 1));
2398 }
e51917ae
JM
2399#endif
2400
b723e82f
JJ
2401 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2402 {
d6b5193b
RS
2403 switch_to_section (toc_section);
2404 switch_to_section (text_section);
b723e82f 2405 }
3cfa4909 2406}
c4e18b1c 2407
5248c961 2408\f
a0ab749a 2409/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2410
2411int
863d938c 2412direct_return (void)
9878760c 2413{
4697a36c
MM
2414 if (reload_completed)
2415 {
2416 rs6000_stack_t *info = rs6000_stack_info ();
2417
2418 if (info->first_gp_reg_save == 32
2419 && info->first_fp_reg_save == 64
00b960c7 2420 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2421 && ! info->lr_save_p
2422 && ! info->cr_save_p
00b960c7 2423 && info->vrsave_mask == 0
c81fc13e 2424 && ! info->push_p)
4697a36c
MM
2425 return 1;
2426 }
2427
2428 return 0;
9878760c
RK
2429}
2430
4e74d8ec
MM
2431/* Return the number of instructions it takes to form a constant in an
2432 integer register. */
2433
48d72335 2434int
a2369ed3 2435num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2436{
2437 /* signed constant loadable with {cal|addi} */
547b216d 2438 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2439 return 1;
2440
4e74d8ec 2441 /* constant loadable with {cau|addis} */
547b216d
DE
2442 else if ((value & 0xffff) == 0
2443 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2444 return 1;
2445
5f59ecb7 2446#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2447 else if (TARGET_POWERPC64)
4e74d8ec 2448 {
a65c591c
DE
2449 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2450 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2451
a65c591c 2452 if (high == 0 || high == -1)
4e74d8ec
MM
2453 return 2;
2454
a65c591c 2455 high >>= 1;
4e74d8ec 2456
a65c591c 2457 if (low == 0)
4e74d8ec 2458 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2459 else
2460 return (num_insns_constant_wide (high)
e396202a 2461 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2462 }
2463#endif
2464
2465 else
2466 return 2;
2467}
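/* Worked examples of the cases above:
   num_insns_constant_wide (0x7fff) == 1 (fits the signed 16-bit addi);
   num_insns_constant_wide (0x12340000) == 1 (low half zero, addis);
   num_insns_constant_wide (0x12345678) == 2 (addis followed by ori).  */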
2468
2469int
a2369ed3 2470num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2471{
37409796 2472 HOST_WIDE_INT low, high;
bb8df8a6 2473
37409796 2474 switch (GET_CODE (op))
0d30d435 2475 {
37409796 2476 case CONST_INT:
0d30d435 2477#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2478 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2479 && mask64_operand (op, mode))
c4ad648e 2480 return 2;
0d30d435
DE
2481 else
2482#endif
2483 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2484
37409796 2485 case CONST_DOUBLE:
e41b2a33 2486 if (mode == SFmode || mode == SDmode)
37409796
NS
2487 {
2488 long l;
2489 REAL_VALUE_TYPE rv;
bb8df8a6 2490
37409796 2491 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
e41b2a33
PB
2492 if (DECIMAL_FLOAT_MODE_P (mode))
2493 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
2494 else
2495 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
37409796
NS
2496 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2497 }
a260abc9 2498
37409796
NS
2499 if (mode == VOIDmode || mode == DImode)
2500 {
2501 high = CONST_DOUBLE_HIGH (op);
2502 low = CONST_DOUBLE_LOW (op);
2503 }
2504 else
2505 {
2506 long l[2];
2507 REAL_VALUE_TYPE rv;
bb8df8a6 2508
37409796 2509 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2510 if (DECIMAL_FLOAT_MODE_P (mode))
2511 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2512 else
2513 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2514 high = l[WORDS_BIG_ENDIAN == 0];
2515 low = l[WORDS_BIG_ENDIAN != 0];
2516 }
47ad8c61 2517
37409796
NS
2518 if (TARGET_32BIT)
2519 return (num_insns_constant_wide (low)
2520 + num_insns_constant_wide (high));
2521 else
2522 {
2523 if ((high == 0 && low >= 0)
2524 || (high == -1 && low < 0))
2525 return num_insns_constant_wide (low);
bb8df8a6 2526
1990cd79 2527 else if (mask64_operand (op, mode))
37409796 2528 return 2;
bb8df8a6 2529
37409796
NS
2530 else if (low == 0)
2531 return num_insns_constant_wide (high) + 1;
bb8df8a6 2532
37409796
NS
2533 else
2534 return (num_insns_constant_wide (high)
2535 + num_insns_constant_wide (low) + 1);
2536 }
bb8df8a6 2537
37409796
NS
2538 default:
2539 gcc_unreachable ();
4e74d8ec 2540 }
4e74d8ec
MM
2541}
2542
0972012c
RS
2543/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2544 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2545 corresponding element of the vector, but for V4SFmode and V2SFmode,
2546 the corresponding "float" is interpreted as an SImode integer. */
2547
2548static HOST_WIDE_INT
2549const_vector_elt_as_int (rtx op, unsigned int elt)
2550{
2551 rtx tmp = CONST_VECTOR_ELT (op, elt);
2552 if (GET_MODE (op) == V4SFmode
2553 || GET_MODE (op) == V2SFmode)
2554 tmp = gen_lowpart (SImode, tmp);
2555 return INTVAL (tmp);
2556}
452a7d36 2557
77ccdfed 2558/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2559 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2560 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2561 all items are set to the same value and contain COPIES replicas of the
2562 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2563 operand and the others are set to the value of the operand's msb. */
2564
2565static bool
2566vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2567{
66180ff3
PB
2568 enum machine_mode mode = GET_MODE (op);
2569 enum machine_mode inner = GET_MODE_INNER (mode);
2570
2571 unsigned i;
2572 unsigned nunits = GET_MODE_NUNITS (mode);
2573 unsigned bitsize = GET_MODE_BITSIZE (inner);
2574 unsigned mask = GET_MODE_MASK (inner);
2575
0972012c 2576 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2577 HOST_WIDE_INT splat_val = val;
2578 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2579
2580 /* Construct the value to be splatted, if possible. If not, return 0. */
2581 for (i = 2; i <= copies; i *= 2)
452a7d36 2582 {
66180ff3
PB
2583 HOST_WIDE_INT small_val;
2584 bitsize /= 2;
2585 small_val = splat_val >> bitsize;
2586 mask >>= bitsize;
2587 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2588 return false;
2589 splat_val = small_val;
2590 }
c4ad648e 2591
66180ff3
PB
2592 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2593 if (EASY_VECTOR_15 (splat_val))
2594 ;
2595
2596 /* Also check if we can splat, and then add the result to itself. Do so if
2597 the value is positive, or if the splat instruction is using OP's mode;
2598 for splat_val < 0, the splat and the add should use the same mode. */
2599 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2600 && (splat_val >= 0 || (step == 1 && copies == 1)))
2601 ;
2602
2603 else
2604 return false;
2605
2606 /* Check if VAL is present in every STEP-th element, and the
2607 other elements are filled with its most significant bit. */
2608 for (i = 0; i < nunits - 1; ++i)
2609 {
2610 HOST_WIDE_INT desired_val;
2611 if (((i + 1) & (step - 1)) == 0)
2612 desired_val = val;
2613 else
2614 desired_val = msb_val;
2615
0972012c 2616 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2617 return false;
452a7d36 2618 }
66180ff3
PB
2619
2620 return true;
452a7d36
HP
2621}
2622
69ef87e2 2623
77ccdfed 2624/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2625 with a vspltisb, vspltish or vspltisw. */
2626
2627bool
2628easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2629{
66180ff3 2630 unsigned step, copies;
d744e06e 2631
66180ff3
PB
2632 if (mode == VOIDmode)
2633 mode = GET_MODE (op);
2634 else if (mode != GET_MODE (op))
2635 return false;
d744e06e 2636
66180ff3
PB
2637 /* Start with a vspltisw. */
2638 step = GET_MODE_NUNITS (mode) / 4;
2639 copies = 1;
2640
2641 if (vspltis_constant (op, step, copies))
2642 return true;
2643
2644 /* Then try with a vspltish. */
2645 if (step == 1)
2646 copies <<= 1;
2647 else
2648 step >>= 1;
2649
2650 if (vspltis_constant (op, step, copies))
2651 return true;
2652
2653 /* And finally a vspltisb. */
2654 if (step == 1)
2655 copies <<= 1;
2656 else
2657 step >>= 1;
2658
2659 if (vspltis_constant (op, step, copies))
2660 return true;
2661
2662 return false;
d744e06e
AH
2663}
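/* Worked example of the step/copies search above: the V4SImode constant
   { 5, 5, 5, 5 } succeeds on the first attempt (step 1, copies 1) and
   is emitted as "vspltisw 5", whereas { 0x00050005, 0x00050005,
   0x00050005, 0x00050005 } fails the vspltisw check but succeeds with
   copies == 2, i.e. it can be generated as "vspltish 5".  */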
2664
66180ff3
PB
2665/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2666 result is OP. Abort if it is not possible. */
d744e06e 2667
f676971a 2668rtx
66180ff3 2669gen_easy_altivec_constant (rtx op)
452a7d36 2670{
66180ff3
PB
2671 enum machine_mode mode = GET_MODE (op);
2672 int nunits = GET_MODE_NUNITS (mode);
2673 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2674 unsigned step = nunits / 4;
2675 unsigned copies = 1;
2676
2677 /* Start with a vspltisw. */
2678 if (vspltis_constant (op, step, copies))
2679 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2680
2681 /* Then try with a vspltish. */
2682 if (step == 1)
2683 copies <<= 1;
2684 else
2685 step >>= 1;
2686
2687 if (vspltis_constant (op, step, copies))
2688 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2689
2690 /* And finally a vspltisb. */
2691 if (step == 1)
2692 copies <<= 1;
2693 else
2694 step >>= 1;
2695
2696 if (vspltis_constant (op, step, copies))
2697 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2698
2699 gcc_unreachable ();
d744e06e
AH
2700}
2701
2702const char *
a2369ed3 2703output_vec_const_move (rtx *operands)
d744e06e
AH
2704{
2705 int cst, cst2;
2706 enum machine_mode mode;
2707 rtx dest, vec;
2708
2709 dest = operands[0];
2710 vec = operands[1];
d744e06e 2711 mode = GET_MODE (dest);
69ef87e2 2712
d744e06e
AH
2713 if (TARGET_ALTIVEC)
2714 {
66180ff3 2715 rtx splat_vec;
d744e06e
AH
2716 if (zero_constant (vec, mode))
2717 return "vxor %0,%0,%0";
37409796 2718
66180ff3
PB
2719 splat_vec = gen_easy_altivec_constant (vec);
2720 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2721 operands[1] = XEXP (splat_vec, 0);
2722 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2723 return "#";
bb8df8a6 2724
66180ff3 2725 switch (GET_MODE (splat_vec))
98ef3137 2726 {
37409796 2727 case V4SImode:
66180ff3 2728 return "vspltisw %0,%1";
c4ad648e 2729
37409796 2730 case V8HImode:
66180ff3 2731 return "vspltish %0,%1";
c4ad648e 2732
37409796 2733 case V16QImode:
66180ff3 2734 return "vspltisb %0,%1";
bb8df8a6 2735
37409796
NS
2736 default:
2737 gcc_unreachable ();
98ef3137 2738 }
69ef87e2
AH
2739 }
2740
37409796 2741 gcc_assert (TARGET_SPE);
bb8df8a6 2742
37409796
NS
2743 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2744 pattern of V1DI, V4HI, and V2SF.
2745
2746 FIXME: We should probably return # and add post reload
2747 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
2748 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2749 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2750 operands[1] = CONST_VECTOR_ELT (vec, 0);
2751 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2752 if (cst == cst2)
2753 return "li %0,%1\n\tevmergelo %0,%0,%0";
2754 else
2755 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2756}
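/* Output sketch for the SPE branch above: a V2SI constant { 7, 7 }
   yields "li %0,%1\n\tevmergelo %0,%0,%0", while { 7, 9 } additionally
   needs the trailing "li %0,%2" for the second element.  */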
2757
f5027409
RE
2758/* Initialize the paired-float vector TARGET to VALS. */
2759
2760void
2761paired_expand_vector_init (rtx target, rtx vals)
2762{
2763 enum machine_mode mode = GET_MODE (target);
2764 int n_elts = GET_MODE_NUNITS (mode);
2765 int n_var = 0;
2766 rtx x, new, tmp, constant_op, op1, op2;
2767 int i;
2768
2769 for (i = 0; i < n_elts; ++i)
2770 {
2771 x = XVECEXP (vals, 0, i);
2772 if (!CONSTANT_P (x))
2773 ++n_var;
2774 }
2775 if (n_var == 0)
2776 {
2777 /* Load from constant pool. */
2778 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2779 return;
2780 }
2781
2782 if (n_var == 2)
2783 {
2784 /* The vector is initialized only with non-constants. */
2785 new = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
2786 XVECEXP (vals, 0, 1));
2787
2788 emit_move_insn (target, new);
2789 return;
2790 }
2791
2792 /* One field is non-constant and the other one is a constant. Load the
2793 constant from the constant pool and use ps_merge instruction to
2794 construct the whole vector. */
2795 op1 = XVECEXP (vals, 0, 0);
2796 op2 = XVECEXP (vals, 0, 1);
2797
2798 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
2799
2800 tmp = gen_reg_rtx (GET_MODE (constant_op));
2801 emit_move_insn (tmp, constant_op);
2802
2803 if (CONSTANT_P (op1))
2804 new = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
2805 else
2806 new = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
2807
2808 emit_move_insn (target, new);
2809}
2810
e2e95f45
RE
2811void
2812paired_expand_vector_move (rtx operands[])
2813{
2814 rtx op0 = operands[0], op1 = operands[1];
2815
2816 emit_move_insn (op0, op1);
2817}
2818
2819/* Emit vector compare for code RCODE. DEST is destination, OP1 and
2820 OP2 are two VEC_COND_EXPR operands, CC_OP0 and CC_OP1 are the two
2821 operands for the relation operation COND. This is a recursive
2822 function. */
2823
2824static void
2825paired_emit_vector_compare (enum rtx_code rcode,
2826 rtx dest, rtx op0, rtx op1,
2827 rtx cc_op0, rtx cc_op1)
2828{
2829 rtx tmp = gen_reg_rtx (V2SFmode);
2830 rtx tmp1, max, min, equal_zero;
2831
2832 gcc_assert (TARGET_PAIRED_FLOAT);
2833 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
2834
2835 switch (rcode)
2836 {
2837 case LT:
2838 case LTU:
2839 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2840 return;
2841 case GE:
2842 case GEU:
2843 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2844 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
2845 return;
2846 case LE:
2847 case LEU:
2848 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
2849 return;
2850 case GT:
2851 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2852 return;
2853 case EQ:
2854 tmp1 = gen_reg_rtx (V2SFmode);
2855 max = gen_reg_rtx (V2SFmode);
2856 min = gen_reg_rtx (V2SFmode);
2857 equal_zero = gen_reg_rtx (V2SFmode);
2858
2859 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2860 emit_insn (gen_selv2sf4
2861 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2862 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
2863 emit_insn (gen_selv2sf4
2864 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2865 emit_insn (gen_subv2sf3 (tmp1, min, max));
2866 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
2867 return;
2868 case NE:
2869 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
2870 return;
2871 case UNLE:
2872 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2873 return;
2874 case UNLT:
2875 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
2876 return;
2877 case UNGE:
2878 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2879 return;
2880 case UNGT:
2881 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
2882 return;
2883 default:
2884 gcc_unreachable ();
2885 }
2886
2887 return;
2888}
2889
2890/* Emit vector conditional expression.
2891 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
2892 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
2893
2894int
2895paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
2896 rtx cond, rtx cc_op0, rtx cc_op1)
2897{
2898 enum rtx_code rcode = GET_CODE (cond);
2899
2900 if (!TARGET_PAIRED_FLOAT)
2901 return 0;
2902
2903 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
2904
2905 return 1;
2906}
2907
7a4eca66
DE
2908/* Initialize vector TARGET to VALS. */
2909
2910void
2911rs6000_expand_vector_init (rtx target, rtx vals)
2912{
2913 enum machine_mode mode = GET_MODE (target);
2914 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2915 int n_elts = GET_MODE_NUNITS (mode);
2916 int n_var = 0, one_var = -1;
2917 bool all_same = true, all_const_zero = true;
2918 rtx x, mem;
2919 int i;
2920
2921 for (i = 0; i < n_elts; ++i)
2922 {
2923 x = XVECEXP (vals, 0, i);
2924 if (!CONSTANT_P (x))
2925 ++n_var, one_var = i;
2926 else if (x != CONST0_RTX (inner_mode))
2927 all_const_zero = false;
2928
2929 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2930 all_same = false;
2931 }
2932
2933 if (n_var == 0)
2934 {
2935 if (mode != V4SFmode && all_const_zero)
2936 {
2937 /* Zero register. */
2938 emit_insn (gen_rtx_SET (VOIDmode, target,
2939 gen_rtx_XOR (mode, target, target)));
2940 return;
2941 }
66180ff3 2942 else if (mode != V4SFmode && easy_vector_constant (vals, mode))
7a4eca66
DE
2943 {
2944 /* Splat immediate. */
66180ff3 2945 emit_insn (gen_rtx_SET (VOIDmode, target, vals));
7a4eca66
DE
2946 return;
2947 }
2948 else if (all_same)
2949 ; /* Splat vector element. */
2950 else
2951 {
2952 /* Load from constant pool. */
2953 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2954 return;
2955 }
2956 }
2957
2958 /* Store value to stack temp. Load vector element. Splat. */
2959 if (all_same)
2960 {
2961 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2962 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2963 XVECEXP (vals, 0, 0));
2964 x = gen_rtx_UNSPEC (VOIDmode,
2965 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2966 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2967 gen_rtvec (2,
2968 gen_rtx_SET (VOIDmode,
2969 target, mem),
2970 x)));
2971 x = gen_rtx_VEC_SELECT (inner_mode, target,
2972 gen_rtx_PARALLEL (VOIDmode,
2973 gen_rtvec (1, const0_rtx)));
2974 emit_insn (gen_rtx_SET (VOIDmode, target,
2975 gen_rtx_VEC_DUPLICATE (mode, x)));
2976 return;
2977 }
2978
2979 /* One field is non-constant. Load constant then overwrite
2980 varying field. */
2981 if (n_var == 1)
2982 {
2983 rtx copy = copy_rtx (vals);
2984
57b51d4d 2985 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
2986 varying element. */
2987 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2988 rs6000_expand_vector_init (target, copy);
2989
2990 /* Insert variable. */
2991 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2992 return;
2993 }
2994
2995 /* Construct the vector in memory one field at a time
2996 and load the whole vector. */
2997 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2998 for (i = 0; i < n_elts; i++)
2999 emit_move_insn (adjust_address_nv (mem, inner_mode,
3000 i * GET_MODE_SIZE (inner_mode)),
3001 XVECEXP (vals, 0, i));
3002 emit_move_insn (target, mem);
3003}
3004
3005/* Set field ELT of TARGET to VAL. */
3006
3007void
3008rs6000_expand_vector_set (rtx target, rtx val, int elt)
3009{
3010 enum machine_mode mode = GET_MODE (target);
3011 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3012 rtx reg = gen_reg_rtx (mode);
3013 rtx mask, mem, x;
3014 int width = GET_MODE_SIZE (inner_mode);
3015 int i;
3016
3017 /* Load single variable value. */
3018 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3019 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
3020 x = gen_rtx_UNSPEC (VOIDmode,
3021 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3022 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3023 gen_rtvec (2,
3024 gen_rtx_SET (VOIDmode,
3025 reg, mem),
3026 x)));
3027
3028 /* Linear sequence. */
3029 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
3030 for (i = 0; i < 16; ++i)
3031 XVECEXP (mask, 0, i) = GEN_INT (i);
3032
3033 /* Set permute mask to insert element into target. */
3034 for (i = 0; i < width; ++i)
3035 XVECEXP (mask, 0, elt*width + i)
3036 = GEN_INT (i + 0x10);
3037 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3038 x = gen_rtx_UNSPEC (mode,
3039 gen_rtvec (3, target, reg,
3040 force_reg (V16QImode, x)),
3041 UNSPEC_VPERM);
3042 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3043}
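/* Worked example of the permute mask built above (indices 0x10..0x1f
   select bytes of the second vperm input): for a V4SImode TARGET and
   ELT == 2, WIDTH is 4, so mask bytes 8..11 become 0x10..0x13 and the
   vperm replaces element 2 of TARGET with data taken from REG, leaving
   the other elements (identity indices 0..7 and 12..15) unchanged.  */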
3044
3045/* Extract field ELT from VEC into TARGET. */
3046
3047void
3048rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3049{
3050 enum machine_mode mode = GET_MODE (vec);
3051 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3052 rtx mem, x;
3053
3054 /* Allocate mode-sized buffer. */
3055 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3056
3057 /* Add offset to field within buffer matching vector element. */
3058 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3059
3060 /* Store single field into mode-sized buffer. */
3061 x = gen_rtx_UNSPEC (VOIDmode,
3062 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3063 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3064 gen_rtvec (2,
3065 gen_rtx_SET (VOIDmode,
3066 mem, vec),
3067 x)));
3068 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3069}
3070
0ba1b2ff
AM
3071/* Generates shifts and masks for a pair of rldicl or rldicr insns to
3072 implement ANDing by the mask IN. */
3073void
a2369ed3 3074build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
3075{
3076#if HOST_BITS_PER_WIDE_INT >= 64
3077 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3078 int shift;
3079
37409796 3080 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
3081
3082 c = INTVAL (in);
3083 if (c & 1)
3084 {
3085 /* Assume c initially something like 0x00fff000000fffff. The idea
3086 is to rotate the word so that the middle ^^^^^^ group of zeros
3087 is at the MS end and can be cleared with an rldicl mask. We then
3088 rotate back and clear off the MS ^^ group of zeros with a
3089 second rldicl. */
3090 c = ~c; /* c == 0xff000ffffff00000 */
3091 lsb = c & -c; /* lsb == 0x0000000000100000 */
3092 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3093 c = ~c; /* c == 0x00fff000000fffff */
3094 c &= -lsb; /* c == 0x00fff00000000000 */
3095 lsb = c & -c; /* lsb == 0x0000100000000000 */
3096 c = ~c; /* c == 0xff000fffffffffff */
3097 c &= -lsb; /* c == 0xff00000000000000 */
3098 shift = 0;
3099 while ((lsb >>= 1) != 0)
3100 shift++; /* shift == 44 on exit from loop */
3101 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3102 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3103 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
3104 }
3105 else
0ba1b2ff
AM
3106 {
3107 /* Assume c initially something like 0xff000f0000000000. The idea
3108 is to rotate the word so that the ^^^ middle group of zeros
3109 is at the LS end and can be cleared with an rldicr mask. We then
3110 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3111 a second rldicr. */
3112 lsb = c & -c; /* lsb == 0x0000010000000000 */
3113 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3114 c = ~c; /* c == 0x00fff0ffffffffff */
3115 c &= -lsb; /* c == 0x00fff00000000000 */
3116 lsb = c & -c; /* lsb == 0x0000100000000000 */
3117 c = ~c; /* c == 0xff000fffffffffff */
3118 c &= -lsb; /* c == 0xff00000000000000 */
3119 shift = 0;
3120 while ((lsb >>= 1) != 0)
3121 shift++; /* shift == 44 on exit from loop */
3122 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3123 m1 >>= shift; /* m1 == 0x0000000000000fff */
3124 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3125 }
3126
3127 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3128 masks will be all 1's. We are guaranteed more than one transition. */
3129 out[0] = GEN_INT (64 - shift);
3130 out[1] = GEN_INT (m1);
3131 out[2] = GEN_INT (shift);
3132 out[3] = GEN_INT (m2);
3133#else
045572c7
GK
3134 (void)in;
3135 (void)out;
37409796 3136 gcc_unreachable ();
0ba1b2ff 3137#endif
a260abc9
DE
3138}
3139
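/* Worked example, added for illustration (the splitter that consumes these
   operands is assumed to live in rs6000.md): for IN == 0x00fff000000fffff,
   the case traced in the comments above, OUT receives
   { 20, 0x000000ffffffffff, 44, 0x00ffffffffffffff }.  Rotating left by 20,
   ANDing with the first mask, rotating left by a further 44 (back to the
   original alignment) and ANDing with the second mask reproduces the
   original AND.  */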
54b695e7 3140/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
3141
3142bool
54b695e7
AH
3143invalid_e500_subreg (rtx op, enum machine_mode mode)
3144{
61c76239
JM
3145 if (TARGET_E500_DOUBLE)
3146 {
17caeff2
JM
3147 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
3148 subreg:TI and reg:TF. */
61c76239 3149 if (GET_CODE (op) == SUBREG
17caeff2 3150 && (mode == SImode || mode == DImode || mode == TImode)
61c76239 3151 && REG_P (SUBREG_REG (op))
17caeff2 3152 && (GET_MODE (SUBREG_REG (op)) == DFmode
4d4447b5
PB
3153 || GET_MODE (SUBREG_REG (op)) == TFmode
3154 || GET_MODE (SUBREG_REG (op)) == DDmode
3155 || GET_MODE (SUBREG_REG (op)) == TDmode))
61c76239
JM
3156 return true;
3157
17caeff2
JM
3158 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3159 reg:TI. */
61c76239 3160 if (GET_CODE (op) == SUBREG
4d4447b5
PB
3161 && (mode == DFmode || mode == TFmode
3162 || mode == DDmode || mode == TDmode)
61c76239 3163 && REG_P (SUBREG_REG (op))
17caeff2
JM
3164 && (GET_MODE (SUBREG_REG (op)) == DImode
3165 || GET_MODE (SUBREG_REG (op)) == TImode))
61c76239
JM
3166 return true;
3167 }
54b695e7 3168
61c76239
JM
3169 if (TARGET_SPE
3170 && GET_CODE (op) == SUBREG
3171 && mode == SImode
54b695e7 3172 && REG_P (SUBREG_REG (op))
14502dad 3173 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
3174 return true;
3175
3176 return false;
3177}
3178
58182de3 3179/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
3180 field is an FP double, while the FP fields themselves remain word aligned. */
3181
19d66194 3182unsigned int
fa5b0972
AM
3183rs6000_special_round_type_align (tree type, unsigned int computed,
3184 unsigned int specified)
95727fb8 3185{
fa5b0972 3186 unsigned int align = MAX (computed, specified);
95727fb8 3187 tree field = TYPE_FIELDS (type);
95727fb8 3188
bb8df8a6 3189 /* Skip all non-field decls. */
85962ac8 3190 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
3191 field = TREE_CHAIN (field);
3192
fa5b0972
AM
3193 if (field != NULL && field != type)
3194 {
3195 type = TREE_TYPE (field);
3196 while (TREE_CODE (type) == ARRAY_TYPE)
3197 type = TREE_TYPE (type);
3198
3199 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3200 align = MAX (align, 64);
3201 }
95727fb8 3202
fa5b0972 3203 return align;
95727fb8
AP
3204}
3205
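/* Hypothetical example, added to illustrate the rule above: a record such
   as

     struct s { double d; int i; };

   gets its overall alignment raised to 64 bits because its first field is
   a DFmode double, even though double fields elsewhere in a record remain
   only word aligned.  */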
58182de3
GK
3206/* Darwin increases record alignment to the natural alignment of
3207 the first field. */
3208
3209unsigned int
3210darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3211 unsigned int specified)
3212{
3213 unsigned int align = MAX (computed, specified);
3214
3215 if (TYPE_PACKED (type))
3216 return align;
3217
3218 /* Find the first field, looking down into aggregates. */
3219 do {
3220 tree field = TYPE_FIELDS (type);
3221 /* Skip all non-field decls. */
3222 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3223 field = TREE_CHAIN (field);
3224 if (! field)
3225 break;
3226 type = TREE_TYPE (field);
3227 while (TREE_CODE (type) == ARRAY_TYPE)
3228 type = TREE_TYPE (type);
3229 } while (AGGREGATE_TYPE_P (type));
3230
3231 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3232 align = MAX (align, TYPE_ALIGN (type));
3233
3234 return align;
3235}
3236
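/* Hypothetical example, added to illustrate the Darwin rule above: for

     struct outer { struct { double d; } inner; char c; };

   the loop descends into the first field until it reaches the double, so
   the record inherits the double's natural 64-bit alignment, unless the
   type is declared packed.  */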
a4f6c312 3237/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3238
3239int
f676971a 3240small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3241 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3242{
38c1f2d7 3243#if TARGET_ELF
5f59ecb7 3244 rtx sym_ref;
7509c759 3245
d9407988 3246 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3247 return 0;
a54d04b7 3248
f607bc57 3249 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3250 return 0;
3251
2aa42e6e
NF
3252 /* Vector and float memory instructions have a limited offset on the
3253 SPE, so using a vector or float variable directly as an operand is
3254 not useful. */
3255 if (TARGET_SPE
3256 && (SPE_VECTOR_MODE (mode) || FLOAT_MODE_P (mode)))
3257 return 0;
3258
88228c4b
MM
3259 if (GET_CODE (op) == SYMBOL_REF)
3260 sym_ref = op;
3261
3262 else if (GET_CODE (op) != CONST
3263 || GET_CODE (XEXP (op, 0)) != PLUS
3264 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3265 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3266 return 0;
3267
88228c4b 3268 else
dbf55e53
MM
3269 {
3270 rtx sum = XEXP (op, 0);
3271 HOST_WIDE_INT summand;
3272
3273 /* We have to be careful here, because it is the referenced address
c4ad648e 3274 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3275 summand = INTVAL (XEXP (sum, 1));
307b599c 3276 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3277 return 0;
dbf55e53
MM
3278
3279 sym_ref = XEXP (sum, 0);
3280 }
88228c4b 3281
20bfcd69 3282 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3283#else
3284 return 0;
3285#endif
7509c759 3286}
46c07df8 3287
3a1f863f 3288/* Return true if either operand is a general purpose register. */
46c07df8 3289
3a1f863f
DE
3290bool
3291gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3292{
3a1f863f
DE
3293 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3294 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3295}
3296
9ebbca7d 3297\f
4d588c14
RH
3298/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3299
f676971a
EC
3300static int
3301constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 3302{
9390387d 3303 switch (GET_CODE (op))
9ebbca7d
GK
3304 {
3305 case SYMBOL_REF:
c4501e62
JJ
3306 if (RS6000_SYMBOL_REF_TLS_P (op))
3307 return 0;
3308 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
3309 {
3310 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3311 {
3312 *have_sym = 1;
3313 return 1;
3314 }
3315 else
3316 return 0;
3317 }
3318 else if (! strcmp (XSTR (op, 0), toc_label_name))
3319 {
3320 *have_toc = 1;
3321 return 1;
3322 }
3323 else
3324 return 0;
9ebbca7d
GK
3325 case PLUS:
3326 case MINUS:
c1f11548
DE
3327 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3328 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 3329 case CONST:
a4f6c312 3330 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 3331 case CONST_INT:
a4f6c312 3332 return 1;
9ebbca7d 3333 default:
a4f6c312 3334 return 0;
9ebbca7d
GK
3335 }
3336}
3337
4d588c14 3338static bool
a2369ed3 3339constant_pool_expr_p (rtx op)
9ebbca7d
GK
3340{
3341 int have_sym = 0;
3342 int have_toc = 0;
3343 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3344}
3345
48d72335 3346bool
a2369ed3 3347toc_relative_expr_p (rtx op)
9ebbca7d 3348{
4d588c14
RH
3349 int have_sym = 0;
3350 int have_toc = 0;
3351 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3352}
3353
4d588c14 3354bool
a2369ed3 3355legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3356{
3357 return (TARGET_TOC
3358 && GET_CODE (x) == PLUS
3359 && GET_CODE (XEXP (x, 0)) == REG
3360 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3361 && constant_pool_expr_p (XEXP (x, 1)));
3362}
3363
d04b6e6e
EB
3364static bool
3365legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3366{
3367 return (DEFAULT_ABI == ABI_V4
3368 && !flag_pic && !TARGET_TOC
3369 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3370 && small_data_operand (x, mode));
3371}
3372
60cdabab
DE
3373/* SPE offset addressing is limited to 5-bits worth of double words. */
3374#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
3375
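/* For illustration (added): the test accepts exactly the offsets
   0, 8, 16, ..., 248 -- a 5-bit double-word index -- so
   SPE_CONST_OFFSET_OK (0xf8) holds while SPE_CONST_OFFSET_OK (0x100)
   and SPE_CONST_OFFSET_OK (4) do not.  */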
76d2b81d
DJ
3376bool
3377rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3378{
3379 unsigned HOST_WIDE_INT offset, extra;
3380
3381 if (GET_CODE (x) != PLUS)
3382 return false;
3383 if (GET_CODE (XEXP (x, 0)) != REG)
3384 return false;
3385 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3386 return false;
60cdabab
DE
3387 if (legitimate_constant_pool_address_p (x))
3388 return true;
4d588c14
RH
3389 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3390 return false;
3391
3392 offset = INTVAL (XEXP (x, 1));
3393 extra = 0;
3394 switch (mode)
3395 {
3396 case V16QImode:
3397 case V8HImode:
3398 case V4SFmode:
3399 case V4SImode:
7a4eca66 3400 /* AltiVec vector modes. Only reg+reg addressing is valid and
1a23970d
DE
3401 constant offset zero should not occur due to canonicalization. */
3402 return false;
4d588c14
RH
3403
3404 case V4HImode:
3405 case V2SImode:
3406 case V1DImode:
3407 case V2SFmode:
d42a3bae 3408 /* Paired vector modes. Only reg+reg addressing is valid and
1a23970d 3409 constant offset zero should not occur due to canonicalization. */
d42a3bae 3410 if (TARGET_PAIRED_FLOAT)
1a23970d 3411 return false;
4d588c14
RH
3412 /* SPE vector modes. */
3413 return SPE_CONST_OFFSET_OK (offset);
3414
3415 case DFmode:
7393f7f8 3416 case DDmode:
4d4cbc0e
AH
3417 if (TARGET_E500_DOUBLE)
3418 return SPE_CONST_OFFSET_OK (offset);
3419
4d588c14 3420 case DImode:
54b695e7
AH
3421 /* On e500v2, we may have:
3422
3423 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3424
3425 Which gets addressed with evldd instructions. */
3426 if (TARGET_E500_DOUBLE)
3427 return SPE_CONST_OFFSET_OK (offset);
3428
7393f7f8 3429 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3430 extra = 4;
3431 else if (offset & 3)
3432 return false;
3433 break;
3434
3435 case TFmode:
4d4447b5 3436 case TDmode:
17caeff2
JM
3437 if (TARGET_E500_DOUBLE)
3438 return (SPE_CONST_OFFSET_OK (offset)
3439 && SPE_CONST_OFFSET_OK (offset + 8));
3440
4d588c14 3441 case TImode:
7393f7f8 3442 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3443 extra = 12;
3444 else if (offset & 3)
3445 return false;
3446 else
3447 extra = 8;
3448 break;
3449
3450 default:
3451 break;
3452 }
3453
b1917422
AM
3454 offset += 0x8000;
3455 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3456}
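/* Worked example, added for illustration: the final test above checks a
   signed 16-bit displacement for every word of the access.  With
   extra == 4 (e.g. DImode without 64-bit GPRs), an offset of 0x7ffc is
   rejected because the second word would sit at 0x8000, beyond the reach
   of a D-form access, while offsets in [-0x8000, 0x7ff8] are accepted.  */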
3457
6fb5fa3c 3458bool
a2369ed3 3459legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3460{
3461 rtx op0, op1;
3462
3463 if (GET_CODE (x) != PLUS)
3464 return false;
850e8d3d 3465
4d588c14
RH
3466 op0 = XEXP (x, 0);
3467 op1 = XEXP (x, 1);
3468
bf00cc0f 3469 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3470 replaced with proper base and index regs. */
3471 if (!strict
3472 && reload_in_progress
3473 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3474 && REG_P (op1))
3475 return true;
3476
3477 return (REG_P (op0) && REG_P (op1)
3478 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3479 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3480 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3481 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3482}
3483
48d72335 3484inline bool
a2369ed3 3485legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3486{
3487 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3488}
3489
48d72335 3490bool
4c81e946
FJ
3491macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3492{
c4ad648e 3493 if (!TARGET_MACHO || !flag_pic
9390387d 3494 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3495 return false;
3496 x = XEXP (x, 0);
4c81e946
FJ
3497
3498 if (GET_CODE (x) != LO_SUM)
3499 return false;
3500 if (GET_CODE (XEXP (x, 0)) != REG)
3501 return false;
3502 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3503 return false;
3504 x = XEXP (x, 1);
3505
3506 return CONSTANT_P (x);
3507}
3508
4d588c14 3509static bool
a2369ed3 3510legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3511{
3512 if (GET_CODE (x) != LO_SUM)
3513 return false;
3514 if (GET_CODE (XEXP (x, 0)) != REG)
3515 return false;
3516 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3517 return false;
54b695e7 3518 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2 3519 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 3520 || mode == DDmode || mode == TDmode
17caeff2 3521 || mode == DImode))
f82f556d 3522 return false;
4d588c14
RH
3523 x = XEXP (x, 1);
3524
8622e235 3525 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3526 {
a29077da 3527 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3528 return false;
3529 if (TARGET_TOC)
3530 return false;
3531 if (GET_MODE_NUNITS (mode) != 1)
3532 return false;
5e5f01b9 3533 if (GET_MODE_BITSIZE (mode) > 64
3c028f65 3534 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
4d4447b5
PB
3535 && !(TARGET_HARD_FLOAT && TARGET_FPRS
3536 && (mode == DFmode || mode == DDmode))))
4d588c14
RH
3537 return false;
3538
3539 return CONSTANT_P (x);
3540 }
3541
3542 return false;
3543}
3544
3545
9ebbca7d
GK
3546/* Try machine-dependent ways of modifying an illegitimate address
3547 to be legitimate. If we find one, return the new, valid address.
3548 This is used from only one place: `memory_address' in explow.c.
3549
a4f6c312
SS
3550 OLDX is the address as it was before break_out_memory_refs was
3551 called. In some cases it is useful to look at this to decide what
3552 needs to be done.
9ebbca7d 3553
a4f6c312 3554 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3555
a4f6c312
SS
3556 It is always safe for this function to do nothing. It exists to
3557 recognize opportunities to optimize the output.
9ebbca7d
GK
3558
3559 On RS/6000, first check for the sum of a register with a constant
3560 integer that is out of range. If so, generate code to add the
3561 constant with the low-order 16 bits masked to the register and force
3562 this result into another register (this can be done with `cau').
3563 Then generate an address of REG+(CONST&0xffff), allowing for the
3564 possibility of bit 16 being a one.
3565
3566 Then check for the sum of a register and something not constant, try to
3567 load the other things into a register and return the sum. */
4d588c14 3568
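/* Worked example, added for illustration: for an incoming address
   (plus (reg) (const_int 0x9000)) the first case below computes
   low_int == -0x7000 and high_int == 0x10000, so the high part is added
   to the register (an addis of 1) and the final address keeps only the
   signed 16-bit displacement -0x7000.  */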
9ebbca7d 3569rtx
a2369ed3
DJ
3570rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3571 enum machine_mode mode)
0ac081f6 3572{
c4501e62
JJ
3573 if (GET_CODE (x) == SYMBOL_REF)
3574 {
3575 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3576 if (model != 0)
3577 return rs6000_legitimize_tls_address (x, model);
3578 }
3579
f676971a 3580 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3581 && GET_CODE (XEXP (x, 0)) == REG
3582 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb
JM
3583 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
3584 && !(SPE_VECTOR_MODE (mode)
3585 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3586 || mode == DImode))))
f676971a 3587 {
9ebbca7d
GK
3588 HOST_WIDE_INT high_int, low_int;
3589 rtx sum;
a65c591c
DE
3590 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3591 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3592 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3593 GEN_INT (high_int)), 0);
3594 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3595 }
f676971a 3596 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3597 && GET_CODE (XEXP (x, 0)) == REG
3598 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3599 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
3600 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3601 || TARGET_POWERPC64
7393f7f8
BE
3602 || (((mode != DImode && mode != DFmode && mode != DDmode)
3603 || TARGET_E500_DOUBLE)
3604 && mode != TFmode && mode != TDmode))
9ebbca7d
GK
3605 && (TARGET_POWERPC64 || mode != DImode)
3606 && mode != TImode)
3607 {
3608 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3609 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3610 }
0ac081f6
AH
3611 else if (ALTIVEC_VECTOR_MODE (mode))
3612 {
3613 rtx reg;
3614
3615 /* Make sure both operands are registers. */
3616 if (GET_CODE (x) == PLUS)
9f85ed45 3617 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3618 force_reg (Pmode, XEXP (x, 1)));
3619
3620 reg = force_reg (Pmode, x);
3621 return reg;
3622 }
4d4cbc0e 3623 else if (SPE_VECTOR_MODE (mode)
17caeff2 3624 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3625 || mode == DDmode || mode == TDmode
54b695e7 3626 || mode == DImode)))
a3170dc6 3627 {
54b695e7
AH
3628 if (mode == DImode)
3629 return NULL_RTX;
a3170dc6
AH
3630 /* We accept [reg + reg] and [reg + OFFSET]. */
3631
3632 if (GET_CODE (x) == PLUS)
61dd226f
NF
3633 {
3634 rtx op1 = XEXP (x, 0);
3635 rtx op2 = XEXP (x, 1);
3636 rtx y;
3637
3638 op1 = force_reg (Pmode, op1);
3639
3640 if (GET_CODE (op2) != REG
3641 && (GET_CODE (op2) != CONST_INT
3642 || !SPE_CONST_OFFSET_OK (INTVAL (op2))
3643 || (GET_MODE_SIZE (mode) > 8
3644 && !SPE_CONST_OFFSET_OK (INTVAL (op2) + 8))))
3645 op2 = force_reg (Pmode, op2);
3646
3647 /* We can't always do [reg + reg] for these, because [reg +
3648 reg + offset] is not a legitimate addressing mode. */
3649 y = gen_rtx_PLUS (Pmode, op1, op2);
3650
3651 if (GET_MODE_SIZE (mode) > 8 && REG_P (op2))
3652 return force_reg (Pmode, y);
3653 else
3654 return y;
3655 }
a3170dc6
AH
3656
3657 return force_reg (Pmode, x);
3658 }
f1384257
AM
3659 else if (TARGET_ELF
3660 && TARGET_32BIT
3661 && TARGET_NO_TOC
3662 && ! flag_pic
9ebbca7d 3663 && GET_CODE (x) != CONST_INT
f676971a 3664 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3665 && CONSTANT_P (x)
6ac7bf2c
GK
3666 && GET_MODE_NUNITS (mode) == 1
3667 && (GET_MODE_BITSIZE (mode) <= 32
4d4447b5
PB
3668 || ((TARGET_HARD_FLOAT && TARGET_FPRS)
3669 && (mode == DFmode || mode == DDmode))))
9ebbca7d
GK
3670 {
3671 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3672 emit_insn (gen_elf_high (reg, x));
3673 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3674 }
ee890fe2
SS
3675 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3676 && ! flag_pic
ab82a49f
AP
3677#if TARGET_MACHO
3678 && ! MACHO_DYNAMIC_NO_PIC_P
3679#endif
ee890fe2 3680 && GET_CODE (x) != CONST_INT
f676971a 3681 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3682 && CONSTANT_P (x)
4d4447b5
PB
3683 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3684 || (mode != DFmode && mode != DDmode))
f676971a 3685 && mode != DImode
ee890fe2
SS
3686 && mode != TImode)
3687 {
3688 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3689 emit_insn (gen_macho_high (reg, x));
3690 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3691 }
f676971a 3692 else if (TARGET_TOC
4d588c14 3693 && constant_pool_expr_p (x)
a9098fd0 3694 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3695 {
3696 return create_TOC_reference (x);
3697 }
3698 else
3699 return NULL_RTX;
3700}
258bfae2 3701
fdbe66f2 3702/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3703 We need to emit DTP-relative relocations. */
3704
fdbe66f2 3705static void
c973d557
JJ
3706rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3707{
3708 switch (size)
3709 {
3710 case 4:
3711 fputs ("\t.long\t", file);
3712 break;
3713 case 8:
3714 fputs (DOUBLE_INT_ASM_OP, file);
3715 break;
3716 default:
37409796 3717 gcc_unreachable ();
c973d557
JJ
3718 }
3719 output_addr_const (file, x);
3720 fputs ("@dtprel+0x8000", file);
3721}
3722
c4501e62
JJ
3723/* Construct the SYMBOL_REF for the tls_get_addr function. */
3724
3725static GTY(()) rtx rs6000_tls_symbol;
3726static rtx
863d938c 3727rs6000_tls_get_addr (void)
c4501e62
JJ
3728{
3729 if (!rs6000_tls_symbol)
3730 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3731
3732 return rs6000_tls_symbol;
3733}
3734
3735/* Construct the SYMBOL_REF for TLS GOT references. */
3736
3737static GTY(()) rtx rs6000_got_symbol;
3738static rtx
863d938c 3739rs6000_got_sym (void)
c4501e62
JJ
3740{
3741 if (!rs6000_got_symbol)
3742 {
3743 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3744 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3745 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3746 }
c4501e62
JJ
3747
3748 return rs6000_got_symbol;
3749}
3750
3751/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3752 this (thread-local) address. */
3753
3754static rtx
a2369ed3 3755rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3756{
3757 rtx dest, insn;
3758
3759 dest = gen_reg_rtx (Pmode);
3760 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3761 {
3762 rtx tlsreg;
3763
3764 if (TARGET_64BIT)
3765 {
3766 tlsreg = gen_rtx_REG (Pmode, 13);
3767 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3768 }
3769 else
3770 {
3771 tlsreg = gen_rtx_REG (Pmode, 2);
3772 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3773 }
3774 emit_insn (insn);
3775 }
3776 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3777 {
3778 rtx tlsreg, tmp;
3779
3780 tmp = gen_reg_rtx (Pmode);
3781 if (TARGET_64BIT)
3782 {
3783 tlsreg = gen_rtx_REG (Pmode, 13);
3784 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3785 }
3786 else
3787 {
3788 tlsreg = gen_rtx_REG (Pmode, 2);
3789 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3790 }
3791 emit_insn (insn);
3792 if (TARGET_64BIT)
3793 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3794 else
3795 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3796 emit_insn (insn);
3797 }
3798 else
3799 {
3800 rtx r3, got, tga, tmp1, tmp2, eqv;
3801
4fed8f8f
AM
3802 /* We currently use relocations like @got@tlsgd for tls, which
3803 means the linker will handle allocation of tls entries, placing
3804 them in the .got section. So use a pointer to the .got section,
3805 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3806 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3807 if (TARGET_64BIT)
972f427b 3808 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3809 else
3810 {
3811 if (flag_pic == 1)
3812 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3813 else
3814 {
3815 rtx gsym = rs6000_got_sym ();
3816 got = gen_reg_rtx (Pmode);
3817 if (flag_pic == 0)
3818 rs6000_emit_move (got, gsym, Pmode);
3819 else
3820 {
e65a3857 3821 rtx tmp3, mem;
c4501e62
JJ
3822 rtx first, last;
3823
c4501e62
JJ
3824 tmp1 = gen_reg_rtx (Pmode);
3825 tmp2 = gen_reg_rtx (Pmode);
3826 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3827 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3828
e65a3857
DE
3829 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
3830 emit_move_insn (tmp1,
1de43f85 3831 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
3832 emit_move_insn (tmp2, mem);
3833 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3834 last = emit_move_insn (got, tmp3);
bd94cb6e 3835 set_unique_reg_note (last, REG_EQUAL, gsym);
6fb5fa3c 3836 maybe_encapsulate_block (first, last, gsym);
c4501e62
JJ
3837 }
3838 }
3839 }
3840
3841 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3842 {
3843 r3 = gen_rtx_REG (Pmode, 3);
3844 if (TARGET_64BIT)
3845 insn = gen_tls_gd_64 (r3, got, addr);
3846 else
3847 insn = gen_tls_gd_32 (r3, got, addr);
3848 start_sequence ();
3849 emit_insn (insn);
3850 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3851 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3852 insn = emit_call_insn (insn);
3853 CONST_OR_PURE_CALL_P (insn) = 1;
3854 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3855 insn = get_insns ();
3856 end_sequence ();
3857 emit_libcall_block (insn, dest, r3, addr);
3858 }
3859 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3860 {
3861 r3 = gen_rtx_REG (Pmode, 3);
3862 if (TARGET_64BIT)
3863 insn = gen_tls_ld_64 (r3, got);
3864 else
3865 insn = gen_tls_ld_32 (r3, got);
3866 start_sequence ();
3867 emit_insn (insn);
3868 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3869 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3870 insn = emit_call_insn (insn);
3871 CONST_OR_PURE_CALL_P (insn) = 1;
3872 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3873 insn = get_insns ();
3874 end_sequence ();
3875 tmp1 = gen_reg_rtx (Pmode);
3876 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3877 UNSPEC_TLSLD);
3878 emit_libcall_block (insn, tmp1, r3, eqv);
3879 if (rs6000_tls_size == 16)
3880 {
3881 if (TARGET_64BIT)
3882 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3883 else
3884 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3885 }
3886 else if (rs6000_tls_size == 32)
3887 {
3888 tmp2 = gen_reg_rtx (Pmode);
3889 if (TARGET_64BIT)
3890 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3891 else
3892 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3893 emit_insn (insn);
3894 if (TARGET_64BIT)
3895 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3896 else
3897 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3898 }
3899 else
3900 {
3901 tmp2 = gen_reg_rtx (Pmode);
3902 if (TARGET_64BIT)
3903 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3904 else
3905 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3906 emit_insn (insn);
3907 insn = gen_rtx_SET (Pmode, dest,
3908 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3909 }
3910 emit_insn (insn);
3911 }
3912 else
3913 {
a7b376ee 3914 /* IE, or 64-bit offset LE. */
c4501e62
JJ
3915 tmp2 = gen_reg_rtx (Pmode);
3916 if (TARGET_64BIT)
3917 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3918 else
3919 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3920 emit_insn (insn);
3921 if (TARGET_64BIT)
3922 insn = gen_tls_tls_64 (dest, tmp2, addr);
3923 else
3924 insn = gen_tls_tls_32 (dest, tmp2, addr);
3925 emit_insn (insn);
3926 }
3927 }
3928
3929 return dest;
3930}
3931
c4501e62
JJ
3932/* Return 1 if X contains a thread-local symbol. */
3933
3934bool
a2369ed3 3935rs6000_tls_referenced_p (rtx x)
c4501e62 3936{
cd413cab
AP
3937 if (! TARGET_HAVE_TLS)
3938 return false;
3939
c4501e62
JJ
3940 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3941}
3942
3943/* Return 1 if *X is a thread-local symbol. This is the same as
3944 rs6000_tls_symbol_ref except for the type of the unused argument. */
3945
9390387d 3946static int
a2369ed3 3947rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
3948{
3949 return RS6000_SYMBOL_REF_TLS_P (*x);
3950}
3951
24ea750e
DJ
3952/* The convention appears to be to define this wherever it is used.
3953 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3954 is now used here. */
3955#ifndef REG_MODE_OK_FOR_BASE_P
3956#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3957#endif
3958
3959/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3960 replace the input X, or the original X if no replacement is called for.
3961 The output parameter *WIN is 1 if the calling macro should goto WIN,
3962 0 if it should not.
3963
3964 For RS/6000, we wish to handle large displacements off a base
3965 register by splitting the addend across an addi/addis pair and the mem insn.
3966 This cuts the number of extra insns needed from 3 to 1.
3967
3968 On Darwin, we use this to generate code for floating point constants.
3969 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3970 The Darwin code is inside #if TARGET_MACHO because only then is
3971 machopic_function_base_name() defined. */
3972rtx
f676971a 3973rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
3974 int opnum, int type,
3975 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 3976{
f676971a 3977 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
3978 if (GET_CODE (x) == PLUS
3979 && GET_CODE (XEXP (x, 0)) == PLUS
3980 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3981 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3982 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3983 {
3984 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3985 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3986 opnum, (enum reload_type)type);
24ea750e
DJ
3987 *win = 1;
3988 return x;
3989 }
3deb2758 3990
24ea750e
DJ
3991#if TARGET_MACHO
3992 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3993 && GET_CODE (x) == LO_SUM
3994 && GET_CODE (XEXP (x, 0)) == PLUS
3995 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3996 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3997 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3998 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3999 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
4000 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
4001 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
4002 {
4003 /* Result of previous invocation of this function on Darwin
6f317ef3 4004 floating point constant. */
24ea750e 4005 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4006 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4007 opnum, (enum reload_type)type);
24ea750e
DJ
4008 *win = 1;
4009 return x;
4010 }
4011#endif
4937d02d
DE
4012
4013 /* Force a non-word-aligned ld/std offset into the base register by
4014 wrapping it in an offset of 0. */
4015 if (GET_CODE (x) == PLUS
4016 && GET_CODE (XEXP (x, 0)) == REG
4017 && REGNO (XEXP (x, 0)) < 32
4018 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
4019 && GET_CODE (XEXP (x, 1)) == CONST_INT
4020 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 4021 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
4022 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
4023 && TARGET_POWERPC64)
4024 {
4025 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
4026 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4027 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4028 opnum, (enum reload_type) type);
4029 *win = 1;
4030 return x;
4031 }
4032
24ea750e
DJ
4033 if (GET_CODE (x) == PLUS
4034 && GET_CODE (XEXP (x, 0)) == REG
4035 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
4036 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 4037 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 4038 && !SPE_VECTOR_MODE (mode)
17caeff2 4039 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 4040 || mode == DDmode || mode == TDmode
54b695e7 4041 || mode == DImode))
78c875e8 4042 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
4043 {
4044 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
4045 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
4046 HOST_WIDE_INT high
c4ad648e 4047 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
4048
4049 /* Check for 32-bit overflow. */
4050 if (high + low != val)
c4ad648e 4051 {
24ea750e
DJ
4052 *win = 0;
4053 return x;
4054 }
4055
4056 /* Reload the high part into a base reg; leave the low part
c4ad648e 4057 in the mem directly. */
24ea750e
DJ
4058
4059 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
4060 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4061 GEN_INT (high)),
4062 GEN_INT (low));
24ea750e
DJ
4063
4064 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4065 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4066 opnum, (enum reload_type)type);
24ea750e
DJ
4067 *win = 1;
4068 return x;
4069 }
4937d02d 4070
24ea750e 4071 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 4072 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 4073 && !SPE_VECTOR_MODE (mode)
8308679f
DE
4074#if TARGET_MACHO
4075 && DEFAULT_ABI == ABI_DARWIN
a29077da 4076 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
4077#else
4078 && DEFAULT_ABI == ABI_V4
4079 && !flag_pic
4080#endif
7393f7f8 4081 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
4d4447b5 4082 The same goes for DImode without 64-bit gprs and DFmode and DDmode
7b5d92b2 4083 without fprs. */
0d8c1c97 4084 && mode != TFmode
7393f7f8 4085 && mode != TDmode
7b5d92b2 4086 && (mode != DImode || TARGET_POWERPC64)
4d4447b5 4087 && ((mode != DFmode && mode != DDmode) || TARGET_POWERPC64
7b5d92b2 4088 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
24ea750e 4089 {
8308679f 4090#if TARGET_MACHO
a29077da
GK
4091 if (flag_pic)
4092 {
4093 rtx offset = gen_rtx_CONST (Pmode,
4094 gen_rtx_MINUS (Pmode, x,
11abc112 4095 machopic_function_base_sym ()));
a29077da
GK
4096 x = gen_rtx_LO_SUM (GET_MODE (x),
4097 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4098 gen_rtx_HIGH (Pmode, offset)), offset);
4099 }
4100 else
8308679f 4101#endif
a29077da 4102 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 4103 gen_rtx_HIGH (Pmode, x), x);
a29077da 4104
24ea750e 4105 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
4106 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4107 opnum, (enum reload_type)type);
24ea750e
DJ
4108 *win = 1;
4109 return x;
4110 }
4937d02d 4111
dec1f3aa
DE
4112 /* Reload an offset address wrapped by an AND that represents the
4113 masking of the lower bits. Strip the outer AND and let reload
4114 convert the offset address into an indirect address. */
4115 if (TARGET_ALTIVEC
4116 && ALTIVEC_VECTOR_MODE (mode)
4117 && GET_CODE (x) == AND
4118 && GET_CODE (XEXP (x, 0)) == PLUS
4119 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4120 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4121 && GET_CODE (XEXP (x, 1)) == CONST_INT
4122 && INTVAL (XEXP (x, 1)) == -16)
4123 {
4124 x = XEXP (x, 0);
4125 *win = 1;
4126 return x;
4127 }
4128
24ea750e 4129 if (TARGET_TOC
4d588c14 4130 && constant_pool_expr_p (x)
c1f11548 4131 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 4132 {
194c524a 4133 x = create_TOC_reference (x);
24ea750e
DJ
4134 *win = 1;
4135 return x;
4136 }
4137 *win = 0;
4138 return x;
f676971a 4139}
24ea750e 4140
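/* Worked example, added for illustration: given
   (plus (reg) (const_int 0x12345)), the displacement-splitting case above
   produces high == 0x10000 and low == 0x2345; the inner
   (plus (reg) (const_int 0x10000)) is handed to push_reload as a base
   register reload while the 0x2345 displacement stays in the mem, the
   single-extra-insn case the function comment describes.  */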
258bfae2
FS
4141/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
4142 that is a valid memory address for an instruction.
4143 The MODE argument is the machine mode for the MEM expression
4144 that wants to use this address.
4145
4146 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
4147 refers to a constant pool entry of an address (or the sum of it
4148 plus a constant), a short (16-bit signed) constant plus a register,
4149 the sum of two registers, or a register indirect, possibly with an
4d4447b5
PB
4150 auto-increment. For DFmode, DDmode and DImode with a constant plus
4151 register, we must ensure that both words are addressable or PowerPC64
4152 with offset word aligned.
258bfae2 4153
4d4447b5 4154 For modes spanning multiple registers (DFmode and DDmode in 32-bit GPRs,
7393f7f8
BE
4155 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4156 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
4157 during assembly output. */
4158int
a2369ed3 4159rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 4160{
850e8d3d
DN
4161 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4162 if (TARGET_ALTIVEC
4163 && ALTIVEC_VECTOR_MODE (mode)
4164 && GET_CODE (x) == AND
4165 && GET_CODE (XEXP (x, 1)) == CONST_INT
4166 && INTVAL (XEXP (x, 1)) == -16)
4167 x = XEXP (x, 0);
4168
c4501e62
JJ
4169 if (RS6000_SYMBOL_REF_TLS_P (x))
4170 return 0;
4d588c14 4171 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
4172 return 1;
4173 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4174 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4175 && !SPE_VECTOR_MODE (mode)
429ec7dc 4176 && mode != TFmode
7393f7f8 4177 && mode != TDmode
54b695e7 4178 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4179 && !(TARGET_E500_DOUBLE
4180 && (mode == DFmode || mode == DDmode || mode == DImode))
258bfae2 4181 && TARGET_UPDATE
4d588c14 4182 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4183 return 1;
d04b6e6e 4184 if (legitimate_small_data_p (mode, x))
258bfae2 4185 return 1;
4d588c14 4186 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
4187 return 1;
4188 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4189 if (! reg_ok_strict
4190 && GET_CODE (x) == PLUS
4191 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4192 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4193 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
4194 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4195 return 1;
76d2b81d 4196 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4197 return 1;
4198 if (mode != TImode
76d2b81d 4199 && mode != TFmode
7393f7f8 4200 && mode != TDmode
a3170dc6
AH
4201 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4202 || TARGET_POWERPC64
4d4447b5 4203 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
258bfae2 4204 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 4205 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4206 return 1;
6fb5fa3c
DB
4207 if (GET_CODE (x) == PRE_MODIFY
4208 && mode != TImode
4209 && mode != TFmode
4210 && mode != TDmode
4211 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4212 || TARGET_POWERPC64
4d4447b5 4213 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
6fb5fa3c
DB
4214 && (TARGET_POWERPC64 || mode != DImode)
4215 && !ALTIVEC_VECTOR_MODE (mode)
4216 && !SPE_VECTOR_MODE (mode)
4217 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4218 && !(TARGET_E500_DOUBLE
4219 && (mode == DFmode || mode == DDmode || mode == DImode))
6fb5fa3c
DB
4220 && TARGET_UPDATE
4221 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4222 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
4223 || legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
4224 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4225 return 1;
4d588c14 4226 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4227 return 1;
4228 return 0;
4229}
4d588c14
RH
4230
4231/* Go to LABEL if ADDR (a legitimate address expression)
4232 has an effect that depends on the machine mode it is used for.
4233
4234 On the RS/6000 this is true of all integral offsets (since AltiVec
4235 modes don't allow them) and of any pre-increment or decrement.
4236
4237 ??? Except that due to conceptual problems in offsettable_address_p
4238 we can't really report the problems of integral offsets. So leave
f676971a 4239 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4240 sub-words of a TFmode operand, which is what we had before. */
4241
4242bool
a2369ed3 4243rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4244{
4245 switch (GET_CODE (addr))
4246 {
4247 case PLUS:
4248 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4249 {
4250 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4251 return val + 12 + 0x8000 >= 0x10000;
4252 }
4253 break;
4254
4255 case LO_SUM:
4256 return true;
4257
6fb5fa3c
DB
4258 case PRE_INC:
4259 case PRE_DEC:
4260 case PRE_MODIFY:
4261 return TARGET_UPDATE;
4d588c14
RH
4262
4263 default:
4264 break;
4265 }
4266
4267 return false;
4268}
d8ecbcdb 4269
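/* For illustration (added): under the PLUS case above,
   (plus (reg) (const_int 32756)) is reported as mode dependent because a
   TFmode access would need sub-word offsets up to 32756 + 12 = 32768,
   which no longer fits a signed 16-bit displacement, whereas
   (plus (reg) (const_int 32752)) is not.  */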
d04b6e6e
EB
4270/* More elaborate version of recog's offsettable_memref_p predicate
4271 that works around the ??? note of rs6000_mode_dependent_address.
4272 In particular it accepts
4273
4274 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4275
4276 in 32-bit mode, which the recog predicate rejects. */
4277
4278bool
4279rs6000_offsettable_memref_p (rtx op)
4280{
4281 if (!MEM_P (op))
4282 return false;
4283
4284 /* First mimic offsettable_memref_p. */
4285 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4286 return true;
4287
4288 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4289 the latter predicate knows nothing about the mode of the memory
4290 reference and, therefore, assumes that it is the largest supported
4291 mode (TFmode). As a consequence, legitimate offsettable memory
4292 references are rejected. rs6000_legitimate_offset_address_p contains
4293 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4294 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4295}
4296
d8ecbcdb
AH
4297/* Return number of consecutive hard regs needed starting at reg REGNO
4298 to hold something of mode MODE.
4299 This is ordinarily the length in words of a value of mode MODE
4300 but can be less for certain modes in special long registers.
4301
4302 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4303 scalar instructions. The upper 32 bits are only available to the
4304 SIMD instructions.
4305
4306 POWER and PowerPC GPRs hold 32 bits worth;
4307 PowerPC64 GPRs and FPRs hold 64 bits worth. */
4308
4309int
4310rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4311{
4312 if (FP_REGNO_P (regno))
4313 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4314
4315 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4316 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4317
4318 if (ALTIVEC_REGNO_P (regno))
4319 return
4320 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4321
8521c414
JM
4322 /* The value returned for SCmode in the E500 double case is 2 for
4323 ABI compatibility; storing an SCmode value in a single register
4324 would require function_arg and rs6000_spe_function_arg to handle
4325 SCmode so as to pass the value correctly in a pair of
4326 registers. */
4327 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
4328 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4329
d8ecbcdb
AH
4330 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4331}
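/* For illustration (added, assuming the usual rs6000.h definitions of
   UNITS_PER_WORD == 4 for 32-bit, UNITS_PER_FP_WORD == 8 and
   UNITS_PER_ALTIVEC_WORD == 16): a DFmode value needs two GPRs on a
   32-bit target but only one FPR, and a V16QImode value occupies a
   single AltiVec register.  */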
2aa4498c
AH
4332
4333/* Change register usage conditional on target flags. */
4334void
4335rs6000_conditional_register_usage (void)
4336{
4337 int i;
4338
4339 /* Set MQ register fixed (already call_used) if not POWER
4340 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4341 be allocated. */
4342 if (! TARGET_POWER)
4343 fixed_regs[64] = 1;
4344
7c9ac5c0 4345 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4346 if (TARGET_64BIT)
4347 fixed_regs[13] = call_used_regs[13]
4348 = call_really_used_regs[13] = 1;
4349
4350 /* Conditionally disable FPRs. */
4351 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4352 for (i = 32; i < 64; i++)
4353 fixed_regs[i] = call_used_regs[i]
c4ad648e 4354 = call_really_used_regs[i] = 1;
2aa4498c 4355
7c9ac5c0
PH
4356 /* The TOC register is not killed across calls in a way that is
4357 visible to the compiler. */
4358 if (DEFAULT_ABI == ABI_AIX)
4359 call_really_used_regs[2] = 0;
4360
2aa4498c
AH
4361 if (DEFAULT_ABI == ABI_V4
4362 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4363 && flag_pic == 2)
4364 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4365
4366 if (DEFAULT_ABI == ABI_V4
4367 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4368 && flag_pic == 1)
4369 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4370 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4371 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4372
4373 if (DEFAULT_ABI == ABI_DARWIN
4374 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4375 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4376 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4377 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4378
b4db40bf
JJ
4379 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4380 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4381 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4382
2aa4498c
AH
4383 if (TARGET_SPE)
4384 {
4385 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4386 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4387 registers in prologues and epilogues. We no longer use r14
4388 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4389 pool for link-compatibility with older versions of GCC. Once
4390 "old" code has died out, we can return r14 to the allocation
4391 pool. */
4392 fixed_regs[14]
4393 = call_used_regs[14]
4394 = call_really_used_regs[14] = 1;
2aa4498c
AH
4395 }
4396
0db747be 4397 if (!TARGET_ALTIVEC)
2aa4498c
AH
4398 {
4399 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4400 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4401 call_really_used_regs[VRSAVE_REGNO] = 1;
4402 }
4403
0db747be
DE
4404 if (TARGET_ALTIVEC)
4405 global_regs[VSCR_REGNO] = 1;
4406
2aa4498c 4407 if (TARGET_ALTIVEC_ABI)
0db747be
DE
4408 {
4409 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4410 call_used_regs[i] = call_really_used_regs[i] = 1;
4411
4412 /* AIX reserves VR20:31 in non-extended ABI mode. */
4413 if (TARGET_XCOFF)
4414 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
4415 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4416 }
2aa4498c 4417}
fb4d4348 4418\f
a4f6c312
SS
4419/* Try to output insns to set TARGET equal to the constant C if it can
4420 be done in less than N insns. Do all computations in MODE.
4421 Returns the place where the output has been placed if it can be
4422 done and the insns have been emitted. If it would take more than N
4423 insns, zero is returned and no insns are emitted. */
2bfcf297
DB
4424
4425rtx
f676971a 4426rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4427 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4428{
af8cb5c5 4429 rtx result, insn, set;
2bfcf297
DB
4430 HOST_WIDE_INT c0, c1;
4431
37409796 4432 switch (mode)
2bfcf297 4433 {
37409796
NS
4434 case QImode:
4435 case HImode:
2bfcf297 4436 if (dest == NULL)
c4ad648e 4437 dest = gen_reg_rtx (mode);
2bfcf297
DB
4438 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4439 return dest;
bb8df8a6 4440
37409796 4441 case SImode:
b3a13419 4442 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4443
d448860e 4444 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4445 GEN_INT (INTVAL (source)
4446 & (~ (HOST_WIDE_INT) 0xffff))));
4447 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4448 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4449 GEN_INT (INTVAL (source) & 0xffff))));
4450 result = dest;
37409796
NS
4451 break;
4452
4453 case DImode:
4454 switch (GET_CODE (source))
af8cb5c5 4455 {
37409796 4456 case CONST_INT:
af8cb5c5
DE
4457 c0 = INTVAL (source);
4458 c1 = -(c0 < 0);
37409796 4459 break;
bb8df8a6 4460
37409796 4461 case CONST_DOUBLE:
2bfcf297 4462#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4463 c0 = CONST_DOUBLE_LOW (source);
4464 c1 = -(c0 < 0);
2bfcf297 4465#else
af8cb5c5
DE
4466 c0 = CONST_DOUBLE_LOW (source);
4467 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4468#endif
37409796
NS
4469 break;
4470
4471 default:
4472 gcc_unreachable ();
af8cb5c5 4473 }
af8cb5c5
DE
4474
4475 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4476 break;
4477
4478 default:
4479 gcc_unreachable ();
2bfcf297 4480 }
2bfcf297 4481
af8cb5c5
DE
4482 insn = get_last_insn ();
4483 set = single_set (insn);
4484 if (! CONSTANT_P (SET_SRC (set)))
4485 set_unique_reg_note (insn, REG_EQUAL, source);
4486
4487 return result;
2bfcf297
DB
4488}
4489
4490/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4491 fall back to a straightforward decomposition. We do this to avoid
4492 exponential run times encountered when looking for longer sequences
4493 with rs6000_emit_set_const. */
4494static rtx
a2369ed3 4495rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4496{
4497 if (!TARGET_POWERPC64)
4498 {
4499 rtx operand1, operand2;
4500
4501 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4502 DImode);
d448860e 4503 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4504 DImode);
4505 emit_move_insn (operand1, GEN_INT (c1));
4506 emit_move_insn (operand2, GEN_INT (c2));
4507 }
4508 else
4509 {
bc06712d 4510 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4511
bc06712d 4512 ud1 = c1 & 0xffff;
f921c9c9 4513 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4514#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4515 c2 = c1 >> 32;
2bfcf297 4516#endif
bc06712d 4517 ud3 = c2 & 0xffff;
f921c9c9 4518 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4519
f676971a 4520 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4521 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4522 {
bc06712d 4523 if (ud1 & 0x8000)
b78d48dd 4524 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4525 else
4526 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4527 }
2bfcf297 4528
f676971a 4529 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4530 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4531 {
bc06712d 4532 if (ud2 & 0x8000)
f676971a 4533 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4534 - 0x80000000));
252b88f7 4535 else
bc06712d
TR
4536 emit_move_insn (dest, GEN_INT (ud2 << 16));
4537 if (ud1 != 0)
d448860e
JH
4538 emit_move_insn (copy_rtx (dest),
4539 gen_rtx_IOR (DImode, copy_rtx (dest),
4540 GEN_INT (ud1)));
252b88f7 4541 }
f676971a 4542 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4543 || (ud4 == 0 && ! (ud3 & 0x8000)))
4544 {
4545 if (ud3 & 0x8000)
f676971a 4546 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4547 - 0x80000000));
4548 else
4549 emit_move_insn (dest, GEN_INT (ud3 << 16));
4550
4551 if (ud2 != 0)
d448860e
JH
4552 emit_move_insn (copy_rtx (dest),
4553 gen_rtx_IOR (DImode, copy_rtx (dest),
4554 GEN_INT (ud2)));
4555 emit_move_insn (copy_rtx (dest),
4556 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4557 GEN_INT (16)));
bc06712d 4558 if (ud1 != 0)
d448860e
JH
4559 emit_move_insn (copy_rtx (dest),
4560 gen_rtx_IOR (DImode, copy_rtx (dest),
4561 GEN_INT (ud1)));
bc06712d 4562 }
f676971a 4563 else
bc06712d
TR
4564 {
4565 if (ud4 & 0x8000)
f676971a 4566 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4567 - 0x80000000));
4568 else
4569 emit_move_insn (dest, GEN_INT (ud4 << 16));
4570
4571 if (ud3 != 0)
d448860e
JH
4572 emit_move_insn (copy_rtx (dest),
4573 gen_rtx_IOR (DImode, copy_rtx (dest),
4574 GEN_INT (ud3)));
2bfcf297 4575
d448860e
JH
4576 emit_move_insn (copy_rtx (dest),
4577 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4578 GEN_INT (32)));
bc06712d 4579 if (ud2 != 0)
d448860e
JH
4580 emit_move_insn (copy_rtx (dest),
4581 gen_rtx_IOR (DImode, copy_rtx (dest),
4582 GEN_INT (ud2 << 16)));
bc06712d 4583 if (ud1 != 0)
d448860e
JH
4584 emit_move_insn (copy_rtx (dest),
4585 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4586 }
4587 }
2bfcf297
DB
4588 return dest;
4589}
4590
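/* Worked example, added for illustration: materialising
   0x123456789abcdef0 on a 64-bit target takes the final branch above,
   giving ud4 == 0x1234, ud3 == 0x5678, ud2 == 0x9abc, ud1 == 0xdef0, and
   emits roughly lis/ori to build 0x12345678, a left shift by 32, then
   oris/ori to fill in the low halfwords -- five instructions in all.  */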
76d2b81d 4591/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4592 in cases where it won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4593
4594static void
4595rs6000_eliminate_indexed_memrefs (rtx operands[2])
4596{
4597 if (GET_CODE (operands[0]) == MEM
4598 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4599 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4600 && ! reload_in_progress)
4601 operands[0]
4602 = replace_equiv_address (operands[0],
4603 copy_addr_to_reg (XEXP (operands[0], 0)));
4604
4605 if (GET_CODE (operands[1]) == MEM
4606 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4607 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4608 && ! reload_in_progress)
4609 operands[1]
4610 = replace_equiv_address (operands[1],
4611 copy_addr_to_reg (XEXP (operands[1], 0)));
4612}
4613
fb4d4348
GK
4614/* Emit a move from SOURCE to DEST in mode MODE. */
4615void
a2369ed3 4616rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4617{
4618 rtx operands[2];
4619 operands[0] = dest;
4620 operands[1] = source;
f676971a 4621
fb4d4348
GK
4622 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4623 if (GET_CODE (operands[1]) == CONST_DOUBLE
4624 && ! FLOAT_MODE_P (mode)
4625 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4626 {
4627 /* FIXME. This should never happen. */
4628 /* Since it seems that it does, do the safe thing and convert
4629 to a CONST_INT. */
2496c7bd 4630 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4631 }
37409796
NS
4632 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4633 || FLOAT_MODE_P (mode)
4634 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4635 || CONST_DOUBLE_LOW (operands[1]) < 0)
4636 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4637 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4638
c9e8cb32
DD
4639 /* Check if GCC is setting up a block move that will end up using FP
4640 registers as temporaries. We must make sure this is acceptable. */
4641 if (GET_CODE (operands[0]) == MEM
4642 && GET_CODE (operands[1]) == MEM
4643 && mode == DImode
41543739
GK
4644 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4645 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4646 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4647 ? 32 : MEM_ALIGN (operands[0])))
4648 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4649 ? 32
41543739
GK
4650 : MEM_ALIGN (operands[1]))))
4651 && ! MEM_VOLATILE_P (operands [0])
4652 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4653 {
41543739
GK
4654 emit_move_insn (adjust_address (operands[0], SImode, 0),
4655 adjust_address (operands[1], SImode, 0));
d448860e
JH
4656 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4657 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4658 return;
4659 }
630d42a0 4660
b3a13419 4661 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4662 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4663 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4664
a3170dc6
AH
4665 if (mode == SFmode && ! TARGET_POWERPC
4666 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 4667 && GET_CODE (operands[0]) == MEM)
fb4d4348 4668 {
ffc14f31
GK
4669 int regnum;
4670
4671 if (reload_in_progress || reload_completed)
4672 regnum = true_regnum (operands[1]);
4673 else if (GET_CODE (operands[1]) == REG)
4674 regnum = REGNO (operands[1]);
4675 else
4676 regnum = -1;
f676971a 4677
fb4d4348
GK
4678 /* If operands[1] is a register, on POWER it may have
4679 double-precision data in it, so truncate it to single
4680 precision. */
4681 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4682 {
4683 rtx newreg;
b3a13419 4684 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4685 : gen_reg_rtx (mode));
fb4d4348
GK
4686 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4687 operands[1] = newreg;
4688 }
4689 }
4690
c4501e62
JJ
4691 /* Recognize the case where operands[1] is a reference to thread-local
4692 data and load its address to a register. */
84f52ebd 4693 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4694 {
84f52ebd
RH
4695 enum tls_model model;
4696 rtx tmp = operands[1];
4697 rtx addend = NULL;
4698
4699 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4700 {
4701 addend = XEXP (XEXP (tmp, 0), 1);
4702 tmp = XEXP (XEXP (tmp, 0), 0);
4703 }
4704
4705 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4706 model = SYMBOL_REF_TLS_MODEL (tmp);
4707 gcc_assert (model != 0);
4708
4709 tmp = rs6000_legitimize_tls_address (tmp, model);
4710 if (addend)
4711 {
4712 tmp = gen_rtx_PLUS (mode, tmp, addend);
4713 tmp = force_operand (tmp, operands[0]);
4714 }
4715 operands[1] = tmp;
c4501e62
JJ
4716 }
4717
8f4e6caf
RH
4718 /* Handle the case where reload calls us with an invalid address. */
4719 if (reload_in_progress && mode == Pmode
69ef87e2 4720 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4721 || ! nonimmediate_operand (operands[0], mode)))
4722 goto emit_set;
4723
a9baceb1
GK
4724 /* 128-bit constant floating-point values on Darwin should really be
4725 loaded as two parts. */
8521c414 4726 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
4727 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4728 {
4729 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4730 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
4731 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
4732 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
4733 simplify_gen_subreg (imode, operands[1], mode, 0),
4734 imode);
4735 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
4736 GET_MODE_SIZE (imode)),
4737 simplify_gen_subreg (imode, operands[1], mode,
4738 GET_MODE_SIZE (imode)),
4739 imode);
a9baceb1
GK
4740 return;
4741 }
4742
e41b2a33
PB
4743 if (reload_in_progress && cfun->machine->sdmode_stack_slot != NULL_RTX)
4744 cfun->machine->sdmode_stack_slot =
4745 eliminate_regs (cfun->machine->sdmode_stack_slot, VOIDmode, NULL_RTX);
4746
4747 if (reload_in_progress
4748 && mode == SDmode
4749 && MEM_P (operands[0])
4750 && rtx_equal_p (operands[0], cfun->machine->sdmode_stack_slot)
4751 && REG_P (operands[1]))
4752 {
4753 if (FP_REGNO_P (REGNO (operands[1])))
4754 {
4755 rtx mem = adjust_address_nv (operands[0], DDmode, 0);
4756 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4757 emit_insn (gen_movsd_store (mem, operands[1]));
4758 }
4759 else if (INT_REGNO_P (REGNO (operands[1])))
4760 {
4761 rtx mem = adjust_address_nv (operands[0], mode, 4);
4762 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4763 emit_insn (gen_movsd_hardfloat (mem, operands[1]));
4764 }
4765 else
4766 gcc_unreachable ();
4767 return;
4768 }
4769 if (reload_in_progress
4770 && mode == SDmode
4771 && REG_P (operands[0])
4772 && MEM_P (operands[1])
4773 && rtx_equal_p (operands[1], cfun->machine->sdmode_stack_slot))
4774 {
4775 if (FP_REGNO_P (REGNO (operands[0])))
4776 {
4777 rtx mem = adjust_address_nv (operands[1], DDmode, 0);
4778 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4779 emit_insn (gen_movsd_load (operands[0], mem));
4780 }
4781 else if (INT_REGNO_P (REGNO (operands[0])))
4782 {
4783 rtx mem = adjust_address_nv (operands[1], mode, 4);
4784 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4785 emit_insn (gen_movsd_hardfloat (operands[0], mem));
4786 }
4787 else
4788 gcc_unreachable ();
4789 return;
4790 }
4791
fb4d4348
GK
4792 /* FIXME: In the long term, this switch statement should go away
4793 and be replaced by a sequence of tests based on things like
4794 mode == Pmode. */
4795 switch (mode)
4796 {
4797 case HImode:
4798 case QImode:
4799 if (CONSTANT_P (operands[1])
4800 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 4801 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
4802 break;
4803
06f4e019 4804 case TFmode:
7393f7f8 4805 case TDmode:
76d2b81d
DJ
4806 rs6000_eliminate_indexed_memrefs (operands);
4807 /* fall through */
4808
fb4d4348 4809 case DFmode:
7393f7f8 4810 case DDmode:
fb4d4348 4811 case SFmode:
e41b2a33 4812 case SDmode:
f676971a 4813 if (CONSTANT_P (operands[1])
fb4d4348 4814 && ! easy_fp_constant (operands[1], mode))
a9098fd0 4815 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4816 break;
f676971a 4817
0ac081f6
AH
4818 case V16QImode:
4819 case V8HImode:
4820 case V4SFmode:
4821 case V4SImode:
a3170dc6
AH
4822 case V4HImode:
4823 case V2SFmode:
4824 case V2SImode:
00a892b8 4825 case V1DImode:
69ef87e2 4826 if (CONSTANT_P (operands[1])
d744e06e 4827 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
4828 operands[1] = force_const_mem (mode, operands[1]);
4829 break;
f676971a 4830
fb4d4348 4831 case SImode:
a9098fd0 4832 case DImode:
fb4d4348
GK
4833 /* Use the default pattern for the address of ELF small data. */
4834 if (TARGET_ELF
a9098fd0 4835 && mode == Pmode
f607bc57 4836 && DEFAULT_ABI == ABI_V4
f676971a 4837 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
4838 || GET_CODE (operands[1]) == CONST)
4839 && small_data_operand (operands[1], mode))
fb4d4348
GK
4840 {
4841 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4842 return;
4843 }
4844
f607bc57 4845 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
4846 && mode == Pmode && mode == SImode
4847 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
4848 {
4849 emit_insn (gen_movsi_got (operands[0], operands[1]));
4850 return;
4851 }
4852
ee890fe2 4853 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
4854 && TARGET_NO_TOC
4855 && ! flag_pic
a9098fd0 4856 && mode == Pmode
fb4d4348
GK
4857 && CONSTANT_P (operands[1])
4858 && GET_CODE (operands[1]) != HIGH
4859 && GET_CODE (operands[1]) != CONST_INT)
4860 {
b3a13419
ILT
4861 rtx target = (!can_create_pseudo_p ()
4862 ? operands[0]
4863 : gen_reg_rtx (mode));
fb4d4348
GK
4864
4865 /* If this is a function address on -mcall-aixdesc,
4866 convert it to the address of the descriptor. */
4867 if (DEFAULT_ABI == ABI_AIX
4868 && GET_CODE (operands[1]) == SYMBOL_REF
4869 && XSTR (operands[1], 0)[0] == '.')
4870 {
4871 const char *name = XSTR (operands[1], 0);
4872 rtx new_ref;
4873 while (*name == '.')
4874 name++;
4875 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4876 CONSTANT_POOL_ADDRESS_P (new_ref)
4877 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 4878 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 4879 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 4880 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
4881 operands[1] = new_ref;
4882 }
7509c759 4883
ee890fe2
SS
4884 if (DEFAULT_ABI == ABI_DARWIN)
4885 {
ab82a49f
AP
4886#if TARGET_MACHO
4887 if (MACHO_DYNAMIC_NO_PIC_P)
4888 {
4889 /* Take care of any required data indirection. */
4890 operands[1] = rs6000_machopic_legitimize_pic_address (
4891 operands[1], mode, operands[0]);
4892 if (operands[0] != operands[1])
4893 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 4894 operands[0], operands[1]));
ab82a49f
AP
4895 return;
4896 }
4897#endif
b8a55285
AP
4898 emit_insn (gen_macho_high (target, operands[1]));
4899 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
4900 return;
4901 }
4902
fb4d4348
GK
4903 emit_insn (gen_elf_high (target, operands[1]));
4904 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4905 return;
4906 }
4907
a9098fd0
GK
4908 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4909 and we have put it in the TOC, we just need to make a TOC-relative
4910 reference to it. */
4911 if (TARGET_TOC
4912 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 4913 && constant_pool_expr_p (operands[1])
a9098fd0
GK
4914 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4915 get_pool_mode (operands[1])))
fb4d4348 4916 {
a9098fd0 4917 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 4918 }
a9098fd0
GK
4919 else if (mode == Pmode
4920 && CONSTANT_P (operands[1])
38886f37
AO
4921 && ((GET_CODE (operands[1]) != CONST_INT
4922 && ! easy_fp_constant (operands[1], mode))
4923 || (GET_CODE (operands[1]) == CONST_INT
4924 && num_insns_constant (operands[1], mode) > 2)
4925 || (GET_CODE (operands[0]) == REG
4926 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 4927 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
4928 && ! legitimate_constant_pool_address_p (operands[1])
4929 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
4930 {
4931 /* Emit a USE operation so that the constant isn't deleted if
4932 expensive optimizations are turned on because nobody
4933 references it. This should only be done for operands that
4934 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4935 This should not be done for operands that contain LABEL_REFs.
4936 For now, we just handle the obvious case. */
4937 if (GET_CODE (operands[1]) != LABEL_REF)
4938 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4939
c859cda6 4940#if TARGET_MACHO
ee890fe2 4941 /* Darwin uses a special PIC legitimizer. */
ab82a49f 4942 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 4943 {
ee890fe2
SS
4944 operands[1] =
4945 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
4946 operands[0]);
4947 if (operands[0] != operands[1])
4948 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
4949 return;
4950 }
c859cda6 4951#endif
ee890fe2 4952
fb4d4348
GK
4953 /* If we are to limit the number of things we put in the TOC and
4954 this is a symbol plus a constant we can add in one insn,
4955 just put the symbol in the TOC and add the constant. Don't do
4956 this if reload is in progress. */
4957 if (GET_CODE (operands[1]) == CONST
4958 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4959 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 4960 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
4961 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4962 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4963 && ! side_effects_p (operands[0]))
4964 {
a4f6c312
SS
4965 rtx sym =
4966 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
4967 rtx other = XEXP (XEXP (operands[1], 0), 1);
4968
a9098fd0
GK
4969 sym = force_reg (mode, sym);
4970 if (mode == SImode)
4971 emit_insn (gen_addsi3 (operands[0], sym, other));
4972 else
4973 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
4974 return;
4975 }
4976
a9098fd0 4977 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4978
f676971a 4979 if (TARGET_TOC
4d588c14 4980 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
4981 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4982 get_pool_constant (XEXP (operands[1], 0)),
4983 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 4984 {
ba4828e0 4985 operands[1]
542a8afa 4986 = gen_const_mem (mode,
c4ad648e 4987 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 4988 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 4989 }
fb4d4348
GK
4990 }
4991 break;
a9098fd0 4992
fb4d4348 4993 case TImode:
76d2b81d
DJ
4994 rs6000_eliminate_indexed_memrefs (operands);
4995
27dc0551
DE
4996 if (TARGET_POWER)
4997 {
4998 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4999 gen_rtvec (2,
5000 gen_rtx_SET (VOIDmode,
5001 operands[0], operands[1]),
5002 gen_rtx_CLOBBER (VOIDmode,
5003 gen_rtx_SCRATCH (SImode)))));
5004 return;
5005 }
fb4d4348
GK
5006 break;
5007
5008 default:
37409796 5009 gcc_unreachable ();
fb4d4348
GK
5010 }
5011
a9098fd0
GK
5012 /* Above, we may have called force_const_mem which may have returned
5013 an invalid address. If we can, fix this up; otherwise, reload will
5014 have to deal with it. */
8f4e6caf
RH
5015 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
5016 operands[1] = validize_mem (operands[1]);
a9098fd0 5017
8f4e6caf 5018 emit_set:
fb4d4348
GK
5019 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5020}
4697a36c 5021\f
2858f73a
GK
5022/* Nonzero if we can use a floating-point register to pass this arg. */
5023#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 5024 (SCALAR_FLOAT_MODE_P (MODE) \
2858f73a
GK
5025 && (CUM)->fregno <= FP_ARG_MAX_REG \
5026 && TARGET_HARD_FLOAT && TARGET_FPRS)
5027
5028/* Nonzero if we can use an AltiVec register to pass this arg. */
5029#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
5030 (ALTIVEC_VECTOR_MODE (MODE) \
5031 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
5032 && TARGET_ALTIVEC_ABI \
83953138 5033 && (NAMED))
2858f73a 5034
c6e8c921
GK
5035/* Return a nonzero value to say to return the function value in
5036 memory, just as large structures are always returned. TYPE will be
5037 the data type of the value, and FNTYPE will be the type of the
5038 function doing the returning, or @code{NULL} for libcalls.
5039
5040 The AIX ABI for the RS/6000 specifies that all structures are
5041 returned in memory. The Darwin ABI does the same. The SVR4 ABI
5042 specifies that structures <= 8 bytes are returned in r3/r4, but a
5043 draft put them in memory, and GCC used to implement the draft
df01da37 5044 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
5045 controls this instead of DEFAULT_ABI; V.4 targets needing backward
5046 compatibility can change DRAFT_V4_STRUCT_RET to override the
5047 default, and -m switches get the final word. See
5048 rs6000_override_options for more details.
5049
5050 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
5051 long double support is enabled. These values are returned in memory.
5052
5053 int_size_in_bytes returns -1 for variable size objects, which go in
5054 memory always. The cast to unsigned makes -1 > 8. */
5055
5056static bool
586de218 5057rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 5058{
594a51fe
SS
5059 /* In the darwin64 ABI, try to use registers for larger structs
5060 if possible. */
0b5383eb 5061 if (rs6000_darwin64_abi
594a51fe 5062 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
5063 && int_size_in_bytes (type) > 0)
5064 {
5065 CUMULATIVE_ARGS valcum;
5066 rtx valret;
5067
5068 valcum.words = 0;
5069 valcum.fregno = FP_ARG_MIN_REG;
5070 valcum.vregno = ALTIVEC_ARG_MIN_REG;
5071 /* Do a trial code generation as if this were going to be passed
5072 as an argument; if any part goes in memory, we return NULL. */
5073 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
5074 if (valret)
5075 return false;
5076 /* Otherwise fall through to more conventional ABI rules. */
5077 }
594a51fe 5078
c6e8c921 5079 if (AGGREGATE_TYPE_P (type)
df01da37 5080 && (aix_struct_return
c6e8c921
GK
5081 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
5082 return true;
b693336b 5083
bada2eb8
DE
5084 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5085 modes only exist for GCC vector types if -maltivec. */
5086 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
5087 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5088 return false;
5089
b693336b
PB
5090 /* Return synthetic vectors in memory. */
5091 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5092 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5093 {
5094 static bool warned_for_return_big_vectors = false;
5095 if (!warned_for_return_big_vectors)
5096 {
d4ee4d25 5097 warning (0, "GCC vector returned by reference: "
b693336b
PB
5098 "non-standard ABI extension with no compatibility guarantee");
5099 warned_for_return_big_vectors = true;
5100 }
5101 return true;
5102 }
5103
602ea4d3 5104 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 5105 return true;
ad630bef 5106
c6e8c921
GK
5107 return false;
5108}
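
/* Editor's illustration (not part of the original source): a minimal, hedged
   example of the struct-return rule documented above.  The type and function
   names are hypothetical; behaviour assumes a 32-bit SVR4 target compared
   with AIX/Darwin.  */
struct small_pair { int a, b; };              /* 8 bytes */

struct small_pair
make_small_pair (int a, int b)
{
  struct small_pair p = { a, b };
  return p;   /* SVR4: returned in r3/r4; AIX/Darwin: returned via memory,
                 because aix_struct_return sends all aggregates there.  */
}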
5109
4697a36c
MM
5110/* Initialize a variable CUM of type CUMULATIVE_ARGS
5111 for a call to a function whose data type is FNTYPE.
5112 For a library call, FNTYPE is 0.
5113
5114 For incoming args we set the number of prototyped arguments large enough
1c20ae99 5115 that we never return a PARALLEL. */
4697a36c
MM
5116
5117void
f676971a 5118init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
5119 rtx libname ATTRIBUTE_UNUSED, int incoming,
5120 int libcall, int n_named_args)
4697a36c
MM
5121{
5122 static CUMULATIVE_ARGS zero_cumulative;
5123
5124 *cum = zero_cumulative;
5125 cum->words = 0;
5126 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 5127 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 5128 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
5129 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5130 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 5131 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
5132 cum->stdarg = fntype
5133 && (TYPE_ARG_TYPES (fntype) != 0
5134 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5135 != void_type_node));
4697a36c 5136
0f6937fe
AM
5137 cum->nargs_prototype = 0;
5138 if (incoming || cum->prototype)
5139 cum->nargs_prototype = n_named_args;
4697a36c 5140
a5c76ee6 5141 /* Check for a longcall attribute. */
3eb4e360
AM
5142 if ((!fntype && rs6000_default_long_calls)
5143 || (fntype
5144 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5145 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5146 cum->call_cookie |= CALL_LONG;
6a4cee5f 5147
4697a36c
MM
5148 if (TARGET_DEBUG_ARG)
5149 {
5150 fprintf (stderr, "\ninit_cumulative_args:");
5151 if (fntype)
5152 {
5153 tree ret_type = TREE_TYPE (fntype);
5154 fprintf (stderr, " ret code = %s,",
5155 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5156 }
5157
6a4cee5f
MM
5158 if (cum->call_cookie & CALL_LONG)
5159 fprintf (stderr, " longcall,");
5160
4697a36c
MM
5161 fprintf (stderr, " proto = %d, nargs = %d\n",
5162 cum->prototype, cum->nargs_prototype);
5163 }
f676971a 5164
c4ad648e
AM
5165 if (fntype
5166 && !TARGET_ALTIVEC
5167 && TARGET_ALTIVEC_ABI
5168 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5169 {
c85ce869 5170 error ("cannot return value in vector register because"
c4ad648e 5171 " altivec instructions are disabled, use -maltivec"
c85ce869 5172 " to enable them");
c4ad648e 5173 }
4697a36c
MM
5174}
5175\f
fe984136
RH
5176/* Return true if TYPE must be passed on the stack and not in registers. */
5177
5178static bool
586de218 5179rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
5180{
5181 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5182 return must_pass_in_stack_var_size (mode, type);
5183 else
5184 return must_pass_in_stack_var_size_or_pad (mode, type);
5185}
5186
c229cba9
DE
5187/* If defined, a C expression which determines whether, and in which
5188 direction, to pad out an argument with extra space. The value
5189 should be of type `enum direction': either `upward' to pad above
5190 the argument, `downward' to pad below, or `none' to inhibit
5191 padding.
5192
5193 For the AIX ABI structs are always stored left shifted in their
5194 argument slot. */
5195
9ebbca7d 5196enum direction
586de218 5197function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 5198{
6e985040
AM
5199#ifndef AGGREGATE_PADDING_FIXED
5200#define AGGREGATE_PADDING_FIXED 0
5201#endif
5202#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5203#define AGGREGATES_PAD_UPWARD_ALWAYS 0
5204#endif
5205
5206 if (!AGGREGATE_PADDING_FIXED)
5207 {
5208 /* GCC used to pass structures of the same size as integer types as
5209 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 5210 i.e. structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
5211 passed padded downward, except that -mstrict-align further
5212 muddied the water in that multi-component structures of 2 and 4
5213 bytes in size were passed padded upward.
5214
5215 The following arranges for best compatibility with previous
5216 versions of gcc, but removes the -mstrict-align dependency. */
5217 if (BYTES_BIG_ENDIAN)
5218 {
5219 HOST_WIDE_INT size = 0;
5220
5221 if (mode == BLKmode)
5222 {
5223 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5224 size = int_size_in_bytes (type);
5225 }
5226 else
5227 size = GET_MODE_SIZE (mode);
5228
5229 if (size == 1 || size == 2 || size == 4)
5230 return downward;
5231 }
5232 return upward;
5233 }
5234
5235 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5236 {
5237 if (type != 0 && AGGREGATE_TYPE_P (type))
5238 return upward;
5239 }
c229cba9 5240
d3704c46
KH
5241 /* Fall back to the default. */
5242 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
5243}
5244
b6c9286a 5245/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 5246 of an argument with the specified mode and type. If it is not defined,
b6c9286a 5247 PARM_BOUNDARY is used for all arguments.
f676971a 5248
84e9ad15
AM
5249 V.4 wants long longs and doubles to be double word aligned. Just
5250 testing the mode size is a boneheaded way to do this as it means
5251 that other types such as complex int are also double word aligned.
5252 However, we're stuck with this because changing the ABI might break
5253 existing library interfaces.
5254
b693336b
PB
5255 Doubleword align SPE vectors.
5256 Quadword align Altivec vectors.
5257 Quadword align large synthetic vector types. */
b6c9286a
MM
5258
5259int
b693336b 5260function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5261{
84e9ad15
AM
5262 if (DEFAULT_ABI == ABI_V4
5263 && (GET_MODE_SIZE (mode) == 8
5264 || (TARGET_HARD_FLOAT
5265 && TARGET_FPRS
7393f7f8 5266 && (mode == TFmode || mode == TDmode))))
4ed78545 5267 return 64;
ad630bef
DE
5268 else if (SPE_VECTOR_MODE (mode)
5269 || (type && TREE_CODE (type) == VECTOR_TYPE
5270 && int_size_in_bytes (type) >= 8
5271 && int_size_in_bytes (type) < 16))
e1f83b4d 5272 return 64;
ad630bef
DE
5273 else if (ALTIVEC_VECTOR_MODE (mode)
5274 || (type && TREE_CODE (type) == VECTOR_TYPE
5275 && int_size_in_bytes (type) >= 16))
0ac081f6 5276 return 128;
0b5383eb
DJ
5277 else if (rs6000_darwin64_abi && mode == BLKmode
5278 && type && TYPE_ALIGN (type) > 64)
5279 return 128;
9ebbca7d 5280 else
b6c9286a 5281 return PARM_BOUNDARY;
b6c9286a 5282}
c53bdcf5 5283
294bd182
AM
5284/* For a function parm of MODE and TYPE, return the starting word in
5285 the parameter area. NWORDS of the parameter area are already used. */
5286
5287static unsigned int
5288rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5289{
5290 unsigned int align;
5291 unsigned int parm_offset;
5292
5293 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5294 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5295 return nwords + (-(parm_offset + nwords) & align);
5296}
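
/* Editor's illustration (not from the original source): the alignment
   arithmetic above as a standalone sketch.  It assumes PARM_BOUNDARY is 32
   bits and a V.4-style parameter area that begins 2 words into the frame.
   For a 16-byte-aligned argument (boundary 128 bits) with 1 word already
   used: align = 128/32 - 1 = 3 and the result is 1 + (-(2 + 1) & 3) = 2,
   i.e. one padding word is skipped so the argument starts 16-byte aligned.  */
static unsigned int
example_parm_start (unsigned int boundary_bits, unsigned int parm_offset_words,
                    unsigned int nwords)
{
  unsigned int align = boundary_bits / 32 - 1;   /* 32 == assumed PARM_BOUNDARY */
  return nwords + (-(parm_offset_words + nwords) & align);
}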
5297
c53bdcf5
AM
5298/* Compute the size (in words) of a function argument. */
5299
5300static unsigned long
5301rs6000_arg_size (enum machine_mode mode, tree type)
5302{
5303 unsigned long size;
5304
5305 if (mode != BLKmode)
5306 size = GET_MODE_SIZE (mode);
5307 else
5308 size = int_size_in_bytes (type);
5309
5310 if (TARGET_32BIT)
5311 return (size + 3) >> 2;
5312 else
5313 return (size + 7) >> 3;
5314}
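
/* Editor's illustration (not from the original source): the word rounding
   performed above.  A 10-byte BLKmode argument occupies (10 + 3) >> 2 == 3
   words on a 32-bit target and (10 + 7) >> 3 == 2 words on a 64-bit one.  */
static unsigned long
example_arg_size_in_words (unsigned long size_in_bytes, int is_32bit)
{
  return is_32bit ? (size_in_bytes + 3) >> 2 : (size_in_bytes + 7) >> 3;
}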
b6c9286a 5315\f
0b5383eb 5316/* Use this to flush pending int fields. */
594a51fe
SS
5317
5318static void
0b5383eb
DJ
5319rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5320 HOST_WIDE_INT bitpos)
594a51fe 5321{
0b5383eb
DJ
5322 unsigned int startbit, endbit;
5323 int intregs, intoffset;
5324 enum machine_mode mode;
594a51fe 5325
0b5383eb
DJ
5326 if (cum->intoffset == -1)
5327 return;
594a51fe 5328
0b5383eb
DJ
5329 intoffset = cum->intoffset;
5330 cum->intoffset = -1;
5331
5332 if (intoffset % BITS_PER_WORD != 0)
5333 {
5334 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5335 MODE_INT, 0);
5336 if (mode == BLKmode)
594a51fe 5337 {
0b5383eb
DJ
5338 /* We couldn't find an appropriate mode, which happens,
5339 e.g., in packed structs when there are 3 bytes to load.
5340 Move intoffset back to the beginning of the word in this
5341 case. */
5342 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5343 }
594a51fe 5344 }
0b5383eb
DJ
5345
5346 startbit = intoffset & -BITS_PER_WORD;
5347 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5348 intregs = (endbit - startbit) / BITS_PER_WORD;
5349 cum->words += intregs;
5350}
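
/* Editor's illustration (not from the original source): the register count
   computed by the flush above, assuming 64-bit words.  Twelve bytes of
   pending int fields starting at bit 0 cover bits [0, 96), which rounds up
   to ((96 + 63) & -64) / 64 == 2 integer registers.  */
static int
example_flush_reg_count (int intoffset_bits, int bitpos_bits)
{
  const int bits_per_word = 64;   /* assumption: 64-bit words */
  int startbit = intoffset_bits & -bits_per_word;
  int endbit = (bitpos_bits + bits_per_word - 1) & -bits_per_word;
  return (endbit - startbit) / bits_per_word;
}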
5351
5352/* The darwin64 ABI calls for us to recurse down through structs,
5353 looking for elements passed in registers. Unfortunately, we have
5354 to track int register count here also because of misalignments
5355 in powerpc alignment mode. */
5356
5357static void
5358rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5359 tree type,
5360 HOST_WIDE_INT startbitpos)
5361{
5362 tree f;
5363
5364 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5365 if (TREE_CODE (f) == FIELD_DECL)
5366 {
5367 HOST_WIDE_INT bitpos = startbitpos;
5368 tree ftype = TREE_TYPE (f);
70fb00df
AP
5369 enum machine_mode mode;
5370 if (ftype == error_mark_node)
5371 continue;
5372 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5373
5374 if (DECL_SIZE (f) != 0
5375 && host_integerp (bit_position (f), 1))
5376 bitpos += int_bit_position (f);
5377
5378 /* ??? FIXME: else assume zero offset. */
5379
5380 if (TREE_CODE (ftype) == RECORD_TYPE)
5381 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5382 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5383 {
5384 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5385 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5386 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5387 }
5388 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5389 {
5390 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5391 cum->vregno++;
5392 cum->words += 2;
5393 }
5394 else if (cum->intoffset == -1)
5395 cum->intoffset = bitpos;
5396 }
594a51fe
SS
5397}
5398
4697a36c
MM
5399/* Update the data in CUM to advance over an argument
5400 of mode MODE and data type TYPE.
b2d04ecf
AM
5401 (TYPE is null for libcalls where that information may not be available.)
5402
5403 Note that for args passed by reference, function_arg will be called
5404 with MODE and TYPE set to that of the pointer to the arg, not the arg
5405 itself. */
4697a36c
MM
5406
5407void
f676971a 5408function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5409 tree type, int named, int depth)
4697a36c 5410{
0b5383eb
DJ
5411 int size;
5412
594a51fe
SS
5413 /* Only tick off an argument if we're not recursing. */
5414 if (depth == 0)
5415 cum->nargs_prototype--;
4697a36c 5416
ad630bef
DE
5417 if (TARGET_ALTIVEC_ABI
5418 && (ALTIVEC_VECTOR_MODE (mode)
5419 || (type && TREE_CODE (type) == VECTOR_TYPE
5420 && int_size_in_bytes (type) == 16)))
0ac081f6 5421 {
4ed78545
AM
5422 bool stack = false;
5423
2858f73a 5424 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5425 {
6d0ef01e
HP
5426 cum->vregno++;
5427 if (!TARGET_ALTIVEC)
c85ce869 5428 error ("cannot pass argument in vector register because"
6d0ef01e 5429 " altivec instructions are disabled, use -maltivec"
c85ce869 5430 " to enable them");
4ed78545
AM
5431
5432 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5433 even if it is going to be passed in a vector register.
4ed78545
AM
5434 Darwin does the same for variable-argument functions. */
5435 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5436 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5437 stack = true;
6d0ef01e 5438 }
4ed78545
AM
5439 else
5440 stack = true;
5441
5442 if (stack)
c4ad648e 5443 {
a594a19c 5444 int align;
f676971a 5445
2858f73a
GK
5446 /* Vector parameters must be 16-byte aligned. This places
5447 them at 2 mod 4 in terms of words in 32-bit mode, since
5448 the parameter save area starts at offset 24 from the
5449 stack. In 64-bit mode, they just have to start on an
5450 even word, since the parameter save area is 16-byte
5451 aligned. Space for GPRs is reserved even if the argument
5452 will be passed in memory. */
5453 if (TARGET_32BIT)
4ed78545 5454 align = (2 - cum->words) & 3;
2858f73a
GK
5455 else
5456 align = cum->words & 1;
c53bdcf5 5457 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5458
a594a19c
GK
5459 if (TARGET_DEBUG_ARG)
5460 {
f676971a 5461 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5462 cum->words, align);
5463 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5464 cum->nargs_prototype, cum->prototype,
2858f73a 5465 GET_MODE_NAME (mode));
a594a19c
GK
5466 }
5467 }
0ac081f6 5468 }
a4b0320c 5469 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5470 && !cum->stdarg
5471 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5472 cum->sysv_gregno++;
594a51fe
SS
5473
5474 else if (rs6000_darwin64_abi
5475 && mode == BLKmode
0b5383eb
DJ
5476 && TREE_CODE (type) == RECORD_TYPE
5477 && (size = int_size_in_bytes (type)) > 0)
5478 {
5479 /* Variable sized types have size == -1 and are
5480 treated as if consisting entirely of ints.
5481 Pad to 16 byte boundary if needed. */
5482 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5483 && (cum->words % 2) != 0)
5484 cum->words++;
5485 /* For varargs, we can just go up by the size of the struct. */
5486 if (!named)
5487 cum->words += (size + 7) / 8;
5488 else
5489 {
5490 /* It is tempting to say int register count just goes up by
5491 sizeof(type)/8, but this is wrong in a case such as
5492 { int; double; int; } [powerpc alignment]. We have to
5493 grovel through the fields for these too. */
5494 cum->intoffset = 0;
5495 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5496 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5497 size * BITS_PER_UNIT);
5498 }
5499 }
f607bc57 5500 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5501 {
a3170dc6 5502 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5503 && (mode == SFmode || mode == DFmode
e41b2a33 5504 || mode == SDmode || mode == DDmode || mode == TDmode
602ea4d3 5505 || (mode == TFmode && !TARGET_IEEEQUAD)))
4697a36c 5506 {
2d83f070
JJ
5507 /* _Decimal128 must use an even/odd register pair. This assumes
5508 that the register number is odd when fregno is odd. */
5509 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5510 cum->fregno++;
5511
5512 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5513 <= FP_ARG_V4_MAX_REG)
602ea4d3 5514 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5515 else
5516 {
602ea4d3 5517 cum->fregno = FP_ARG_V4_MAX_REG + 1;
4d4447b5
PB
5518 if (mode == DFmode || mode == TFmode
5519 || mode == DDmode || mode == TDmode)
c4ad648e 5520 cum->words += cum->words & 1;
c53bdcf5 5521 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5522 }
4697a36c 5523 }
4cc833b7
RH
5524 else
5525 {
b2d04ecf 5526 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5527 int gregno = cum->sysv_gregno;
5528
4ed78545
AM
5529 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5530 (r7,r8) or (r9,r10). As does any other 2 word item such
5531 as complex int due to a historical mistake. */
5532 if (n_words == 2)
5533 gregno += (1 - gregno) & 1;
4cc833b7 5534
4ed78545 5535 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5536 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5537 {
4ed78545
AM
5538 /* Long long and SPE vectors are aligned on the stack.
5539 So are other 2 word items such as complex int due to
5540 a historical mistake. */
4cc833b7
RH
5541 if (n_words == 2)
5542 cum->words += cum->words & 1;
5543 cum->words += n_words;
5544 }
4697a36c 5545
4cc833b7
RH
5546 /* Note: continuing to accumulate gregno past when we've started
5547 spilling to the stack indicates the fact that we've started
5548 spilling to the stack to expand_builtin_saveregs. */
5549 cum->sysv_gregno = gregno + n_words;
5550 }
4697a36c 5551
4cc833b7
RH
5552 if (TARGET_DEBUG_ARG)
5553 {
5554 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5555 cum->words, cum->fregno);
5556 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5557 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5558 fprintf (stderr, "mode = %4s, named = %d\n",
5559 GET_MODE_NAME (mode), named);
5560 }
4697a36c
MM
5561 }
5562 else
4cc833b7 5563 {
b2d04ecf 5564 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5565 int start_words = cum->words;
5566 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5567
294bd182 5568 cum->words = align_words + n_words;
4697a36c 5569
ebb109ad 5570 if (SCALAR_FLOAT_MODE_P (mode)
a3170dc6 5571 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5572 {
5573 /* _Decimal128 must be passed in an even/odd float register pair.
5574 This assumes that the register number is odd when fregno is
5575 odd. */
5576 if (mode == TDmode && (cum->fregno % 2) == 1)
5577 cum->fregno++;
5578 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5579 }
4cc833b7
RH
5580
5581 if (TARGET_DEBUG_ARG)
5582 {
5583 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5584 cum->words, cum->fregno);
5585 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5586 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5587 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5588 named, align_words - start_words, depth);
4cc833b7
RH
5589 }
5590 }
4697a36c 5591}
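
/* Editor's illustration (not from the original source): the "2 mod 4" vector
   alignment used above on 32-bit targets, where the parameter save area
   begins 24 bytes (6 words) into the frame.  A slot at word index w is
   16-byte aligned exactly when (24 + 4*w) % 16 == 0, i.e. w == 2 (mod 4),
   so (2 - words_used) & 3 is the number of padding words required.  With
   3 words already used this yields 3, placing the vector at word 6.  */
static int
example_vector_pad_words (int words_used)
{
  return (2 - words_used) & 3;
}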
a6c9bed4 5592
f82f556d
AH
5593static rtx
5594spe_build_register_parallel (enum machine_mode mode, int gregno)
5595{
17caeff2 5596 rtx r1, r3, r5, r7;
f82f556d 5597
37409796 5598 switch (mode)
f82f556d 5599 {
37409796 5600 case DFmode:
4d4447b5 5601 case DDmode:
54b695e7
AH
5602 r1 = gen_rtx_REG (DImode, gregno);
5603 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5604 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5605
5606 case DCmode:
17caeff2 5607 case TFmode:
4d4447b5 5608 case TDmode:
54b695e7
AH
5609 r1 = gen_rtx_REG (DImode, gregno);
5610 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5611 r3 = gen_rtx_REG (DImode, gregno + 2);
5612 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5613 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5614
17caeff2
JM
5615 case TCmode:
5616 r1 = gen_rtx_REG (DImode, gregno);
5617 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5618 r3 = gen_rtx_REG (DImode, gregno + 2);
5619 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5620 r5 = gen_rtx_REG (DImode, gregno + 4);
5621 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5622 r7 = gen_rtx_REG (DImode, gregno + 6);
5623 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5624 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5625
37409796
NS
5626 default:
5627 gcc_unreachable ();
f82f556d 5628 }
f82f556d 5629}
b78d48dd 5630
f82f556d 5631/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5632static rtx
f676971a 5633rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5634 tree type)
a6c9bed4 5635{
f82f556d
AH
5636 int gregno = cum->sysv_gregno;
5637
5638 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5639 doubles are passed and returned in a pair of GPRs for ABI compatibility. */
4d4447b5
PB
5640 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
5641 || mode == DDmode || mode == TDmode
5642 || mode == DCmode || mode == TCmode))
f82f556d 5643 {
b5870bee
AH
5644 int n_words = rs6000_arg_size (mode, type);
5645
f82f556d 5646 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4d4447b5 5647 if (mode == DFmode || mode == DDmode)
b5870bee 5648 gregno += (1 - gregno) & 1;
f82f556d 5649
b5870bee
AH
5650 /* Multi-reg args are not split between registers and stack. */
5651 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5652 return NULL_RTX;
5653
5654 return spe_build_register_parallel (mode, gregno);
5655 }
a6c9bed4
AH
5656 if (cum->stdarg)
5657 {
c53bdcf5 5658 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5659
5660 /* SPE vectors are put in odd registers. */
5661 if (n_words == 2 && (gregno & 1) == 0)
5662 gregno += 1;
5663
5664 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5665 {
5666 rtx r1, r2;
5667 enum machine_mode m = SImode;
5668
5669 r1 = gen_rtx_REG (m, gregno);
5670 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5671 r2 = gen_rtx_REG (m, gregno + 1);
5672 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5673 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5674 }
5675 else
b78d48dd 5676 return NULL_RTX;
a6c9bed4
AH
5677 }
5678 else
5679 {
f82f556d
AH
5680 if (gregno <= GP_ARG_MAX_REG)
5681 return gen_rtx_REG (mode, gregno);
a6c9bed4 5682 else
b78d48dd 5683 return NULL_RTX;
a6c9bed4
AH
5684 }
5685}
5686
0b5383eb
DJ
5687/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5688 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5689
0b5383eb 5690static void
bb8df8a6 5691rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5692 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5693{
0b5383eb
DJ
5694 enum machine_mode mode;
5695 unsigned int regno;
5696 unsigned int startbit, endbit;
5697 int this_regno, intregs, intoffset;
5698 rtx reg;
594a51fe 5699
0b5383eb
DJ
5700 if (cum->intoffset == -1)
5701 return;
5702
5703 intoffset = cum->intoffset;
5704 cum->intoffset = -1;
5705
5706 /* If this is the trailing part of a word, try to only load that
5707 much into the register. Otherwise load the whole register. Note
5708 that in the latter case we may pick up unwanted bits. It's not a
5709 problem at the moment, but we may wish to revisit it. */
5710
5711 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5712 {
0b5383eb
DJ
5713 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5714 MODE_INT, 0);
5715 if (mode == BLKmode)
5716 {
5717 /* We couldn't find an appropriate mode, which happens,
5718 e.g., in packed structs when there are 3 bytes to load.
5719 Move intoffset back to the beginning of the word in this
5720 case. */
5721 intoffset = intoffset & -BITS_PER_WORD;
5722 mode = word_mode;
5723 }
5724 }
5725 else
5726 mode = word_mode;
5727
5728 startbit = intoffset & -BITS_PER_WORD;
5729 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5730 intregs = (endbit - startbit) / BITS_PER_WORD;
5731 this_regno = cum->words + intoffset / BITS_PER_WORD;
5732
5733 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
5734 cum->use_stack = 1;
bb8df8a6 5735
0b5383eb
DJ
5736 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
5737 if (intregs <= 0)
5738 return;
5739
5740 intoffset /= BITS_PER_UNIT;
5741 do
5742 {
5743 regno = GP_ARG_MIN_REG + this_regno;
5744 reg = gen_rtx_REG (mode, regno);
5745 rvec[(*k)++] =
5746 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5747
5748 this_regno += 1;
5749 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
5750 mode = word_mode;
5751 intregs -= 1;
5752 }
5753 while (intregs > 0);
5754}
5755
5756/* Recursive workhorse for rs6000_darwin64_record_arg, which follows. */
5757
5758static void
586de218 5759rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
5760 HOST_WIDE_INT startbitpos, rtx rvec[],
5761 int *k)
5762{
5763 tree f;
5764
5765 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5766 if (TREE_CODE (f) == FIELD_DECL)
5767 {
5768 HOST_WIDE_INT bitpos = startbitpos;
5769 tree ftype = TREE_TYPE (f);
70fb00df
AP
5770 enum machine_mode mode;
5771 if (ftype == error_mark_node)
5772 continue;
5773 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5774
5775 if (DECL_SIZE (f) != 0
5776 && host_integerp (bit_position (f), 1))
5777 bitpos += int_bit_position (f);
5778
5779 /* ??? FIXME: else assume zero offset. */
5780
5781 if (TREE_CODE (ftype) == RECORD_TYPE)
5782 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5783 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 5784 {
0b5383eb
DJ
5785#if 0
5786 switch (mode)
594a51fe 5787 {
0b5383eb
DJ
5788 case SCmode: mode = SFmode; break;
5789 case DCmode: mode = DFmode; break;
5790 case TCmode: mode = TFmode; break;
5791 default: break;
594a51fe 5792 }
0b5383eb
DJ
5793#endif
5794 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5795 rvec[(*k)++]
bb8df8a6 5796 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
5797 gen_rtx_REG (mode, cum->fregno++),
5798 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 5799 if (mode == TFmode || mode == TDmode)
0b5383eb 5800 cum->fregno++;
594a51fe 5801 }
0b5383eb
DJ
5802 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5803 {
5804 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5805 rvec[(*k)++]
bb8df8a6
EC
5806 = gen_rtx_EXPR_LIST (VOIDmode,
5807 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
5808 GEN_INT (bitpos / BITS_PER_UNIT));
5809 }
5810 else if (cum->intoffset == -1)
5811 cum->intoffset = bitpos;
5812 }
5813}
594a51fe 5814
0b5383eb
DJ
5815/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5816 the register(s) to be used for each field and subfield of a struct
5817 being passed by value, along with the offset of where the
5818 register's value may be found in the block. FP fields go in FP
5819 registers, vector fields go in vector registers, and everything
bb8df8a6 5820 else goes in int registers, packed as in memory.
8ff40a74 5821
0b5383eb
DJ
5822 This code is also used for function return values. RETVAL indicates
5823 whether this is the case.
8ff40a74 5824
a4d05547 5825 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 5826 calling convention. */
594a51fe 5827
0b5383eb 5828static rtx
586de218 5829rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
5830 int named, bool retval)
5831{
5832 rtx rvec[FIRST_PSEUDO_REGISTER];
5833 int k = 1, kbase = 1;
5834 HOST_WIDE_INT typesize = int_size_in_bytes (type);
5835 /* This is a copy; modifications are not visible to our caller. */
5836 CUMULATIVE_ARGS copy_cum = *orig_cum;
5837 CUMULATIVE_ARGS *cum = &copy_cum;
5838
5839 /* Pad to 16 byte boundary if needed. */
5840 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5841 && (cum->words % 2) != 0)
5842 cum->words++;
5843
5844 cum->intoffset = 0;
5845 cum->use_stack = 0;
5846 cum->named = named;
5847
5848 /* Put entries into rvec[] for individual FP and vector fields, and
5849 for the chunks of memory that go in int regs. Note we start at
5850 element 1; 0 is reserved for an indication of using memory, and
5851 may or may not be filled in below. */
5852 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
5853 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
5854
5855 /* If any part of the struct went on the stack, put all of it there.
5856 This hack is because the generic code for
5857 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
5858 parts of the struct are not at the beginning. */
5859 if (cum->use_stack)
5860 {
5861 if (retval)
5862 return NULL_RTX; /* doesn't go in registers at all */
5863 kbase = 0;
5864 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5865 }
5866 if (k > 1 || cum->use_stack)
5867 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
5868 else
5869 return NULL_RTX;
5870}
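
/* Editor's illustration (not from the original source): the kind of struct
   the routine above decomposes.  Assuming the darwin64 ABI, a by-value
   argument of the hypothetical type below yields a PARALLEL with the 'd'
   field in the next free FP register and the word holding 'i' in the next
   integer register, each EXPR_LIST tagged with the field's byte offset in
   the block (0 and 8 respectively).  */
struct example_rec { double d; int i; };
void example_callee (struct example_rec r);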
5871
b78d48dd
FJ
5872/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
5873
5874static rtx
ec6376ab 5875rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 5876{
ec6376ab
AM
5877 int n_units;
5878 int i, k;
5879 rtx rvec[GP_ARG_NUM_REG + 1];
5880
5881 if (align_words >= GP_ARG_NUM_REG)
5882 return NULL_RTX;
5883
5884 n_units = rs6000_arg_size (mode, type);
5885
5886 /* Optimize the simple case where the arg fits in one gpr, except in
5887 the case of BLKmode due to assign_parms assuming that registers are
5888 BITS_PER_WORD wide. */
5889 if (n_units == 0
5890 || (n_units == 1 && mode != BLKmode))
5891 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5892
5893 k = 0;
5894 if (align_words + n_units > GP_ARG_NUM_REG)
5895 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5896 using a magic NULL_RTX component.
79773478
AM
5897 This is not strictly correct. Only some of the arg belongs in
5898 memory, not all of it. However, the normal scheme using
5899 function_arg_partial_nregs can result in unusual subregs, e.g.
5900 (subreg:SI (reg:DF) 4), which are not handled well. The code to
5901 store the whole arg to memory is often more efficient than code
5902 to store pieces, and we know that space is available in the right
5903 place for the whole arg. */
ec6376ab
AM
5904 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5905
5906 i = 0;
5907 do
36a454e1 5908 {
ec6376ab
AM
5909 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5910 rtx off = GEN_INT (i++ * 4);
5911 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 5912 }
ec6376ab
AM
5913 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5914
5915 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
5916}
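
/* Editor's illustration (not from the original source): how many SImode
   register pieces the PARALLEL above carries, assuming the usual 8 GPR
   argument words (r3..r10).  A 3-word argument whose first word index is 6
   gets the magic NULL_RTX piece plus r9 (offset 0) and r10 (offset 4); its
   last word lives only in memory.  */
static int
example_mixed_gpr_pieces (int align_words, int n_units)
{
  int avail = 8 - align_words;   /* assumption: GP_ARG_NUM_REG == 8 */
  if (avail <= 0)
    return 0;
  return n_units < avail ? n_units : avail;
}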
5917
4697a36c
MM
5918/* Determine where to put an argument to a function.
5919 Value is zero to push the argument on the stack,
5920 or a hard register in which to store the argument.
5921
5922 MODE is the argument's machine mode.
5923 TYPE is the data type of the argument (as a tree).
5924 This is null for libcalls where that information may
5925 not be available.
5926 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
5927 the preceding args and about the function being called. It is
5928 not modified in this routine.
4697a36c
MM
5929 NAMED is nonzero if this argument is a named parameter
5930 (otherwise it is an extra parameter matching an ellipsis).
5931
5932 On RS/6000 the first eight words of non-FP args are normally in registers
5933 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
5934 Under V.4, the first 8 FP args are in registers.
5935
5936 If this is floating-point and no prototype is specified, we use
5937 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 5938 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 5939 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
5940 doesn't support PARALLEL anyway.
5941
5942 Note that for args passed by reference, function_arg will be called
5943 with MODE and TYPE set to that of the pointer to the arg, not the arg
5944 itself. */
4697a36c 5945
9390387d 5946rtx
f676971a 5947function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5948 tree type, int named)
4697a36c 5949{
4cc833b7 5950 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 5951
a4f6c312
SS
5952 /* Return a marker to indicate whether CR1 needs to set or clear the
5953 bit that V.4 uses to say fp args were passed in registers.
5954 Assume that we don't need the marker for software floating point,
5955 or compiler generated library calls. */
4697a36c
MM
5956 if (mode == VOIDmode)
5957 {
f607bc57 5958 if (abi == ABI_V4
b9599e46 5959 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
5960 && (cum->stdarg
5961 || (cum->nargs_prototype < 0
5962 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 5963 {
a3170dc6
AH
5964 /* For the SPE, we need to crxor CR6 always. */
5965 if (TARGET_SPE_ABI)
5966 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
5967 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
5968 return GEN_INT (cum->call_cookie
5969 | ((cum->fregno == FP_ARG_MIN_REG)
5970 ? CALL_V4_SET_FP_ARGS
5971 : CALL_V4_CLEAR_FP_ARGS));
7509c759 5972 }
4697a36c 5973
7509c759 5974 return GEN_INT (cum->call_cookie);
4697a36c
MM
5975 }
5976
0b5383eb
DJ
5977 if (rs6000_darwin64_abi && mode == BLKmode
5978 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 5979 {
0b5383eb 5980 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
5981 if (rslt != NULL_RTX)
5982 return rslt;
5983 /* Else fall through to usual handling. */
5984 }
5985
2858f73a 5986 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
5987 if (TARGET_64BIT && ! cum->prototype)
5988 {
c4ad648e
AM
5989 /* Vector parameters get passed in vector register
5990 and also in GPRs or memory, in absence of prototype. */
5991 int align_words;
5992 rtx slot;
5993 align_words = (cum->words + 1) & ~1;
5994
5995 if (align_words >= GP_ARG_NUM_REG)
5996 {
5997 slot = NULL_RTX;
5998 }
5999 else
6000 {
6001 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6002 }
6003 return gen_rtx_PARALLEL (mode,
6004 gen_rtvec (2,
6005 gen_rtx_EXPR_LIST (VOIDmode,
6006 slot, const0_rtx),
6007 gen_rtx_EXPR_LIST (VOIDmode,
6008 gen_rtx_REG (mode, cum->vregno),
6009 const0_rtx)));
c72d6c26
HP
6010 }
6011 else
6012 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
6013 else if (TARGET_ALTIVEC_ABI
6014 && (ALTIVEC_VECTOR_MODE (mode)
6015 || (type && TREE_CODE (type) == VECTOR_TYPE
6016 && int_size_in_bytes (type) == 16)))
0ac081f6 6017 {
2858f73a 6018 if (named || abi == ABI_V4)
a594a19c 6019 return NULL_RTX;
0ac081f6 6020 else
a594a19c
GK
6021 {
6022 /* Vector parameters to varargs functions under AIX or Darwin
6023 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
6024 int align, align_words, n_words;
6025 enum machine_mode part_mode;
a594a19c
GK
6026
6027 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
6028 2 mod 4 in terms of words in 32-bit mode, since the parameter
6029 save area starts at offset 24 from the stack. In 64-bit mode,
6030 they just have to start on an even word, since the parameter
6031 save area is 16-byte aligned. */
6032 if (TARGET_32BIT)
4ed78545 6033 align = (2 - cum->words) & 3;
2858f73a
GK
6034 else
6035 align = cum->words & 1;
a594a19c
GK
6036 align_words = cum->words + align;
6037
6038 /* Out of registers? Memory, then. */
6039 if (align_words >= GP_ARG_NUM_REG)
6040 return NULL_RTX;
ec6376ab
AM
6041
6042 if (TARGET_32BIT && TARGET_POWERPC64)
6043 return rs6000_mixed_function_arg (mode, type, align_words);
6044
2858f73a
GK
6045 /* The vector value goes in GPRs. Only the part of the
6046 value in GPRs is reported here. */
ec6376ab
AM
6047 part_mode = mode;
6048 n_words = rs6000_arg_size (mode, type);
6049 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 6050 /* Fortunately, there are only two possibilities: the value
2858f73a
GK
6051 is either wholly in GPRs or half in GPRs and half not. */
6052 part_mode = DImode;
ec6376ab
AM
6053
6054 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 6055 }
0ac081f6 6056 }
f82f556d
AH
6057 else if (TARGET_SPE_ABI && TARGET_SPE
6058 && (SPE_VECTOR_MODE (mode)
18f63bfa 6059 || (TARGET_E500_DOUBLE && (mode == DFmode
7393f7f8 6060 || mode == DDmode
17caeff2
JM
6061 || mode == DCmode
6062 || mode == TFmode
7393f7f8 6063 || mode == TDmode
17caeff2 6064 || mode == TCmode))))
a6c9bed4 6065 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 6066
f607bc57 6067 else if (abi == ABI_V4)
4697a36c 6068 {
a3170dc6 6069 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 6070 && (mode == SFmode || mode == DFmode
7393f7f8 6071 || (mode == TFmode && !TARGET_IEEEQUAD)
e41b2a33 6072 || mode == SDmode || mode == DDmode || mode == TDmode))
4cc833b7 6073 {
2d83f070
JJ
6074 /* _Decimal128 must use an even/odd register pair. This assumes
6075 that the register number is odd when fregno is odd. */
6076 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
6077 cum->fregno++;
6078
6079 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
6080 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
6081 return gen_rtx_REG (mode, cum->fregno);
6082 else
b78d48dd 6083 return NULL_RTX;
4cc833b7
RH
6084 }
6085 else
6086 {
b2d04ecf 6087 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
6088 int gregno = cum->sysv_gregno;
6089
4ed78545
AM
6090 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
6091 (r7,r8) or (r9,r10). As does any other 2 word item such
6092 as complex int due to a historical mistake. */
6093 if (n_words == 2)
6094 gregno += (1 - gregno) & 1;
4cc833b7 6095
4ed78545 6096 /* Multi-reg args are not split between registers and stack. */
ec6376ab 6097 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 6098 return NULL_RTX;
ec6376ab
AM
6099
6100 if (TARGET_32BIT && TARGET_POWERPC64)
6101 return rs6000_mixed_function_arg (mode, type,
6102 gregno - GP_ARG_MIN_REG);
6103 return gen_rtx_REG (mode, gregno);
4cc833b7 6104 }
4697a36c 6105 }
4cc833b7
RH
6106 else
6107 {
294bd182 6108 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 6109
2d83f070
JJ
6110 /* _Decimal128 must be passed in an even/odd float register pair.
6111 This assumes that the register number is odd when fregno is odd. */
6112 if (mode == TDmode && (cum->fregno % 2) == 1)
6113 cum->fregno++;
6114
2858f73a 6115 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 6116 {
ec6376ab
AM
6117 rtx rvec[GP_ARG_NUM_REG + 1];
6118 rtx r;
6119 int k;
c53bdcf5
AM
6120 bool needs_psave;
6121 enum machine_mode fmode = mode;
c53bdcf5
AM
6122 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6123
6124 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6125 {
c53bdcf5
AM
6126 /* Currently, we only ever need one reg here because complex
6127 doubles are split. */
7393f7f8
BE
6128 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6129 && (fmode == TFmode || fmode == TDmode));
ec6376ab 6130
7393f7f8
BE
6131 /* Long double or _Decimal128 split over regs and memory. */
6132 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 6133 }
c53bdcf5
AM
6134
6135 /* Do we also need to pass this arg in the parameter save
6136 area? */
6137 needs_psave = (type
6138 && (cum->nargs_prototype <= 0
6139 || (DEFAULT_ABI == ABI_AIX
de17c25f 6140 && TARGET_XL_COMPAT
c53bdcf5
AM
6141 && align_words >= GP_ARG_NUM_REG)));
6142
6143 if (!needs_psave && mode == fmode)
ec6376ab 6144 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 6145
ec6376ab 6146 k = 0;
c53bdcf5
AM
6147 if (needs_psave)
6148 {
ec6376ab 6149 /* Describe the part that goes in gprs or the stack.
c53bdcf5 6150 This piece must come first, before the fprs. */
c53bdcf5
AM
6151 if (align_words < GP_ARG_NUM_REG)
6152 {
6153 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
6154
6155 if (align_words + n_words > GP_ARG_NUM_REG
6156 || (TARGET_32BIT && TARGET_POWERPC64))
6157 {
6158 /* If this is partially on the stack, then we only
6159 include the portion actually in registers here. */
6160 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6161 rtx off;
79773478
AM
6162 int i = 0;
6163 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
6164 /* Not all of the arg fits in gprs. Say that it
6165 goes in memory too, using a magic NULL_RTX
6166 component. Also see comment in
6167 rs6000_mixed_function_arg for why the normal
6168 function_arg_partial_nregs scheme doesn't work
6169 in this case. */
6170 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6171 const0_rtx);
ec6376ab
AM
6172 do
6173 {
6174 r = gen_rtx_REG (rmode,
6175 GP_ARG_MIN_REG + align_words);
2e6c9641 6176 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
6177 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6178 }
6179 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6180 }
6181 else
6182 {
6183 /* The whole arg fits in gprs. */
6184 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6185 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6186 }
c53bdcf5 6187 }
ec6376ab
AM
6188 else
6189 /* It's entirely in memory. */
6190 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
6191 }
6192
ec6376ab
AM
6193 /* Describe where this piece goes in the fprs. */
6194 r = gen_rtx_REG (fmode, cum->fregno);
6195 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6196
6197 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
6198 }
6199 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 6200 {
ec6376ab
AM
6201 if (TARGET_32BIT && TARGET_POWERPC64)
6202 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 6203
4eeca74f
AM
6204 if (mode == BLKmode)
6205 mode = Pmode;
6206
b2d04ecf
AM
6207 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6208 }
4cc833b7
RH
6209 else
6210 return NULL_RTX;
4697a36c 6211 }
4697a36c
MM
6212}
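
/* Editor's illustration (not from the original source): how the rules above
   typically play out for a hypothetical 32-bit AIX call such as
       void example_fn (double a, int b, double c);
   Each argument also reserves its slot(s) in the parameter save area, so
   a -> f1 (shadowing words 0-1 / r3-r4), b -> r5 (word 2), and
   c -> f2 (shadowing words 3-4 / r6-r7).  The register assignments here are
   an illustrative assumption; the authoritative logic is the code above.  */
void example_fn (double a, int b, double c);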
6213\f
ec6376ab 6214/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
6215 the number of bytes passed in registers. For args passed entirely in
6216 registers or entirely in memory, zero. When an arg is described by a
6217 PARALLEL, perhaps using more than one register type, this function
6218 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 6219
78a52f11
RH
6220static int
6221rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6222 tree type, bool named)
4697a36c 6223{
c53bdcf5 6224 int ret = 0;
ec6376ab 6225 int align_words;
c53bdcf5 6226
f607bc57 6227 if (DEFAULT_ABI == ABI_V4)
4697a36c 6228 return 0;
4697a36c 6229
c53bdcf5
AM
6230 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6231 && cum->nargs_prototype >= 0)
6232 return 0;
6233
0b5383eb
DJ
6234 /* In this complicated case we just disable the partial_nregs code. */
6235 if (rs6000_darwin64_abi && mode == BLKmode
6236 && TREE_CODE (type) == RECORD_TYPE
6237 && int_size_in_bytes (type) > 0)
6238 return 0;
6239
294bd182 6240 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6241
79773478
AM
6242 if (USE_FP_FOR_ARG_P (cum, mode, type))
6243 {
fb63c729
AM
6244 /* If we are passing this arg in the fixed parameter save area
6245 (gprs or memory) as well as fprs, then this function should
79773478
AM
6246 return the number of partial bytes passed in the parameter
6247 save area rather than partial bytes passed in fprs. */
6248 if (type
6249 && (cum->nargs_prototype <= 0
6250 || (DEFAULT_ABI == ABI_AIX
6251 && TARGET_XL_COMPAT
6252 && align_words >= GP_ARG_NUM_REG)))
6253 return 0;
6254 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6255 > FP_ARG_MAX_REG + 1)
ac7e839c 6256 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6257 else if (cum->nargs_prototype >= 0)
4697a36c
MM
6258 return 0;
6259 }
6260
ec6376ab
AM
6261 if (align_words < GP_ARG_NUM_REG
6262 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6263 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6264
c53bdcf5 6265 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6266 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6267
c53bdcf5 6268 return ret;
4697a36c
MM
6269}
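
/* Editor's illustration (not from the original source): the partial-bytes
   computation above for a 32-bit target with 8 GPR argument words.  A
   3-word argument starting at word 7 has only word 7 in a register, so it
   reports (8 - 7) * 4 == 4 bytes passed in registers; the rest goes in
   memory.  */
static int
example_partial_bytes (int align_words, int arg_words)
{
  const int gp_arg_num_reg = 8;   /* assumption: 8 GPR argument words */
  if (align_words < gp_arg_num_reg && gp_arg_num_reg < align_words + arg_words)
    return (gp_arg_num_reg - align_words) * 4;   /* 4 bytes per word, 32-bit */
  return 0;
}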
6270\f
6271/* A C expression that indicates when an argument must be passed by
6272 reference. If nonzero for an argument, a copy of that argument is
6273 made in memory and a pointer to the argument is passed instead of
6274 the argument itself. The pointer is passed in whatever way is
6275 appropriate for passing a pointer to that type.
6276
b2d04ecf
AM
6277 Under V.4, aggregates and long double are passed by reference.
6278
6279 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6280 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
6281
6282 As an extension to all ABIs, variable sized types are passed by
6283 reference. */
4697a36c 6284
8cd5a4e0 6285static bool
f676971a 6286rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6287 enum machine_mode mode, const_tree type,
bada2eb8 6288 bool named ATTRIBUTE_UNUSED)
4697a36c 6289{
602ea4d3 6290 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
6291 {
6292 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6293 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6294 return 1;
6295 }
6296
6297 if (!type)
6298 return 0;
4697a36c 6299
bada2eb8
DE
6300 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6301 {
6302 if (TARGET_DEBUG_ARG)
6303 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6304 return 1;
6305 }
6306
6307 if (int_size_in_bytes (type) < 0)
6308 {
6309 if (TARGET_DEBUG_ARG)
6310 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6311 return 1;
6312 }
6313
6314 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6315 modes only exist for GCC vector types if -maltivec. */
6316 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6317 {
6318 if (TARGET_DEBUG_ARG)
6319 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6320 return 1;
6321 }
b693336b
PB
6322
6323 /* Pass synthetic vectors in memory. */
bada2eb8 6324 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6325 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6326 {
6327 static bool warned_for_pass_big_vectors = false;
6328 if (TARGET_DEBUG_ARG)
6329 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6330 if (!warned_for_pass_big_vectors)
6331 {
d4ee4d25 6332 warning (0, "GCC vector passed by reference: "
b693336b
PB
6333 "non-standard ABI extension with no compatibility guarantee");
6334 warned_for_pass_big_vectors = true;
6335 }
6336 return 1;
6337 }
6338
b2d04ecf 6339 return 0;
4697a36c 6340}
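/* Illustrative example (a sketch, not taken from the testsuite):
   under the V.4 rules above, given

     struct big { int a[8]; };
     void f (struct big b, vector int v);

   B is passed by reference because it is an aggregate, and on a
   32-bit target V is also passed by reference unless -mabi=altivec
   is in effect; scalar arguments are still passed by value.  */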
5985c7a6
FJ
6341
6342static void
2d9db8eb 6343rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6344{
6345 int i;
6346 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6347
6348 if (nregs == 0)
6349 return;
6350
c4ad648e 6351 for (i = 0; i < nregs; i++)
5985c7a6 6352 {
9390387d 6353 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6354 if (reload_completed)
c4ad648e
AM
6355 {
6356 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6357 tem = NULL_RTX;
6358 else
6359 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6360 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6361 }
5985c7a6
FJ
6362 else
6363 tem = replace_equiv_address (tem, XEXP (tem, 0));
6364
37409796 6365 gcc_assert (tem);
5985c7a6
FJ
6366
6367 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6368 }
6369}
4697a36c
MM
6370\f
6371/* Perform any actions needed for a function that is receiving a
f676971a 6372 variable number of arguments.
4697a36c
MM
6373
6374 CUM is as above.
6375
6376 MODE and TYPE are the mode and type of the current parameter.
6377
6378 PRETEND_SIZE is a variable that should be set to the amount of stack
6379 that must be pushed by the prolog to pretend that our caller pushed
6380 it.
6381
6382 Normally, this macro will push all remaining incoming registers on the
6383 stack and set PRETEND_SIZE to the length of the registers pushed. */
6384
c6e8c921 6385static void
f676971a 6386setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
6387 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6388 int no_rtl)
4697a36c 6389{
4cc833b7
RH
6390 CUMULATIVE_ARGS next_cum;
6391 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6392 rtx save_area = NULL_RTX, mem;
4862826d
ILT
6393 int first_reg_offset;
6394 alias_set_type set;
4697a36c 6395
f31bf321 6396 /* Skip the last named argument. */
d34c5b80 6397 next_cum = *cum;
594a51fe 6398 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6399
f607bc57 6400 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6401 {
5b667039
JJ
6402 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6403
60e2d0ca 6404 if (! no_rtl)
5b667039
JJ
6405 {
6406 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6407 HOST_WIDE_INT offset = 0;
6408
6409 /* Try to optimize the size of the varargs save area.
6410 The ABI requires that ap.reg_save_area is doubleword
6411 aligned, but we don't need to allocate space for all
6412			 the bytes, only for those to which we will actually
 6413			 save anything.  */
6414 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6415 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6416 if (TARGET_HARD_FLOAT && TARGET_FPRS
6417 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6418 && cfun->va_list_fpr_size)
6419 {
6420 if (gpr_reg_num)
6421 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6422 * UNITS_PER_FP_WORD;
6423 if (cfun->va_list_fpr_size
6424 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6425 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6426 else
6427 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6428 * UNITS_PER_FP_WORD;
6429 }
6430 if (gpr_reg_num)
6431 {
6432 offset = -((first_reg_offset * reg_size) & ~7);
6433 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6434 {
6435 gpr_reg_num = cfun->va_list_gpr_size;
6436 if (reg_size == 4 && (first_reg_offset & 1))
6437 gpr_reg_num++;
6438 }
6439 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6440 }
6441 else if (fpr_size)
6442 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6443 * UNITS_PER_FP_WORD
6444 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6445
5b667039
JJ
6446 if (gpr_size + fpr_size)
6447 {
6448 rtx reg_save_area
6449 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6450 gcc_assert (GET_CODE (reg_save_area) == MEM);
6451 reg_save_area = XEXP (reg_save_area, 0);
6452 if (GET_CODE (reg_save_area) == PLUS)
6453 {
6454 gcc_assert (XEXP (reg_save_area, 0)
6455 == virtual_stack_vars_rtx);
6456 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6457 offset += INTVAL (XEXP (reg_save_area, 1));
6458 }
6459 else
6460 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6461 }
6462
6463 cfun->machine->varargs_save_offset = offset;
6464 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6465 }
4697a36c 6466 }
60e2d0ca 6467 else
4697a36c 6468 {
d34c5b80 6469 first_reg_offset = next_cum.words;
4cc833b7 6470 save_area = virtual_incoming_args_rtx;
4697a36c 6471
fe984136 6472 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6473 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6474 }
4697a36c 6475
dfafc897 6476 set = get_varargs_alias_set ();
9d30f3c1
JJ
6477 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6478 && cfun->va_list_gpr_size)
4cc833b7 6479 {
9d30f3c1
JJ
6480 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6481
6482 if (va_list_gpr_counter_field)
6483 {
6484 /* V4 va_list_gpr_size counts number of registers needed. */
6485 if (nregs > cfun->va_list_gpr_size)
6486 nregs = cfun->va_list_gpr_size;
6487 }
6488 else
6489 {
6490 /* char * va_list instead counts number of bytes needed. */
6491 if (nregs > cfun->va_list_gpr_size / reg_size)
6492 nregs = cfun->va_list_gpr_size / reg_size;
6493 }
6494
dfafc897 6495 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6496 plus_constant (save_area,
13e2e16e
DE
6497 first_reg_offset * reg_size));
6498 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6499 set_mem_alias_set (mem, set);
8ac61af7 6500 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6501
f676971a 6502 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6503 nregs);
4697a36c
MM
6504 }
6505
4697a36c 6506 /* Save FP registers if needed. */
f607bc57 6507 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6508 && TARGET_HARD_FLOAT && TARGET_FPRS
6509 && ! no_rtl
9d30f3c1
JJ
6510 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6511 && cfun->va_list_fpr_size)
4697a36c 6512 {
9d30f3c1 6513 int fregno = next_cum.fregno, nregs;
9ebbca7d 6514 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6515 rtx lab = gen_label_rtx ();
5b667039
JJ
6516 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6517 * UNITS_PER_FP_WORD);
4697a36c 6518
c4ad648e
AM
6519 emit_jump_insn
6520 (gen_rtx_SET (VOIDmode,
6521 pc_rtx,
6522 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6523 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6524 const0_rtx),
39403d82 6525 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6526 pc_rtx)));
6527
9d30f3c1
JJ
6528 for (nregs = 0;
6529 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6530 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6531 {
5496b36f 6532 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 6533 MEM_NOTRAP_P (mem) = 1;
c4ad648e 6534 set_mem_alias_set (mem, set);
94ff898d 6535 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 6536 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 6537 }
4cc833b7
RH
6538
6539 emit_label (lab);
4697a36c 6540 }
4697a36c 6541}
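/* Under the V.4 ABI the register save area laid out above is,
   roughly (a sketch; the actual allocation is trimmed to the
   registers that can still hold unnamed arguments):

     reg_save_area +  0 .. +31 : r3..r10  (8 GPRs * 4 bytes)
     reg_save_area + 32 .. +95 : f1..f8   (8 FPRs * 8 bytes)

   which matches the sav_ofs/sav_scale values used by
   rs6000_gimplify_va_arg below.  */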
4697a36c 6542
dfafc897 6543/* Create the va_list data type. */
2c4974b7 6544
c35d187f
RH
6545static tree
6546rs6000_build_builtin_va_list (void)
dfafc897 6547{
64c2816f 6548 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6549
9ebbca7d
GK
6550 /* For AIX, prefer 'char *' because that's what the system
6551 header files like. */
f607bc57 6552 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6553 return build_pointer_type (char_type_node);
dfafc897 6554
f1e639b1 6555 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6556 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6557
f676971a 6558 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6559 unsigned_char_type_node);
f676971a 6560 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6561 unsigned_char_type_node);
64c2816f
DT
6562 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6563 every user file. */
6564 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6565 short_unsigned_type_node);
dfafc897
FS
6566 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6567 ptr_type_node);
6568 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6569 ptr_type_node);
6570
9d30f3c1
JJ
6571 va_list_gpr_counter_field = f_gpr;
6572 va_list_fpr_counter_field = f_fpr;
6573
dfafc897
FS
6574 DECL_FIELD_CONTEXT (f_gpr) = record;
6575 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6576 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6577 DECL_FIELD_CONTEXT (f_ovf) = record;
6578 DECL_FIELD_CONTEXT (f_sav) = record;
6579
bab45a51
FS
6580 TREE_CHAIN (record) = type_decl;
6581 TYPE_NAME (record) = type_decl;
dfafc897
FS
6582 TYPE_FIELDS (record) = f_gpr;
6583 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6584 TREE_CHAIN (f_fpr) = f_res;
6585 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6586 TREE_CHAIN (f_ovf) = f_sav;
6587
6588 layout_type (record);
6589
6590 /* The correct type is an array type of one element. */
6591 return build_array_type (record, build_index_type (size_zero_node));
6592}
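/* For reference, the record built above corresponds roughly to the
   following C declaration (a sketch of the layout, not copied from
   any header):

     typedef struct __va_list_tag
     {
       unsigned char gpr;          -- next GP register to use
       unsigned char fpr;          -- next FP register to use
       unsigned short reserved;    -- padding, named for -Wpadded
       void *overflow_arg_area;    -- arguments passed on the stack
       void *reg_save_area;        -- saved r3..r10 / f1..f8
     } __builtin_va_list[1];

   whereas on AIX the builtin va_list is simply 'char *'.  */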
6593
6594/* Implement va_start. */
6595
d7bd8aeb 6596static void
a2369ed3 6597rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6598{
dfafc897 6599 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6600 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6601 tree gpr, fpr, ovf, sav, t;
2c4974b7 6602
dfafc897 6603 /* Only SVR4 needs something special. */
f607bc57 6604 if (DEFAULT_ABI != ABI_V4)
dfafc897 6605 {
e5faf155 6606 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6607 return;
6608 }
6609
973a648b 6610 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6611 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6612 f_res = TREE_CHAIN (f_fpr);
6613 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6614 f_sav = TREE_CHAIN (f_ovf);
6615
872a65b5 6616 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6617 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6618 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6619 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6620 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
dfafc897
FS
6621
6622 /* Count number of gp and fp argument registers used. */
4cc833b7 6623 words = current_function_args_info.words;
987732e0
DE
6624 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
6625 GP_ARG_NUM_REG);
6626 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
6627 FP_ARG_NUM_REG);
dfafc897
FS
6628
6629 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6630 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6631 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6632 words, n_gpr, n_fpr);
dfafc897 6633
9d30f3c1
JJ
6634 if (cfun->va_list_gpr_size)
6635 {
07beea0d 6636 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
47a25a46 6637 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6638 TREE_SIDE_EFFECTS (t) = 1;
6639 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6640 }
58c8adc1 6641
9d30f3c1
JJ
6642 if (cfun->va_list_fpr_size)
6643 {
07beea0d 6644 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
47a25a46 6645 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6646 TREE_SIDE_EFFECTS (t) = 1;
6647 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6648 }
dfafc897
FS
6649
6650 /* Find the overflow area. */
6651 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6652 if (words != 0)
5be014d5
AP
6653 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6654 size_int (words * UNITS_PER_WORD));
07beea0d 6655 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6656 TREE_SIDE_EFFECTS (t) = 1;
6657 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6658
9d30f3c1
JJ
6659 /* If there were no va_arg invocations, don't set up the register
6660 save area. */
6661 if (!cfun->va_list_gpr_size
6662 && !cfun->va_list_fpr_size
6663 && n_gpr < GP_ARG_NUM_REG
6664 && n_fpr < FP_ARG_V4_MAX_REG)
6665 return;
6666
dfafc897
FS
6667 /* Find the register save area. */
6668 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6669 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6670 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6671 size_int (cfun->machine->varargs_save_offset));
07beea0d 6672 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
dfafc897
FS
6673 TREE_SIDE_EFFECTS (t) = 1;
6674 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6675}
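/* Example (a sketch): for 'void f (int a, ...)' compiled for V.4,
   A arrives in r3, so after va_start the counters read gpr = 1 and
   fpr = 0, overflow_arg_area points at the incoming stack arguments,
   and reg_save_area points at the block stored by
   setup_incoming_varargs above (r4..r10, followed by f1..f8 when
   hard-float FP registers are in use).  */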
6676
6677/* Implement va_arg. */
6678
23a60a04
JM
6679tree
6680rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 6681{
cd3ce9b4
JM
6682 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6683 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6684 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6685 tree lab_false, lab_over, addr;
6686 int align;
6687 tree ptrtype = build_pointer_type (type);
7393f7f8 6688 int regalign = 0;
cd3ce9b4 6689
08b0dc1b
RH
6690 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6691 {
6692 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6693 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6694 }
6695
cd3ce9b4
JM
6696 if (DEFAULT_ABI != ABI_V4)
6697 {
08b0dc1b 6698 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6699 {
6700 tree elem_type = TREE_TYPE (type);
6701 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6702 int elem_size = GET_MODE_SIZE (elem_mode);
6703
6704 if (elem_size < UNITS_PER_WORD)
6705 {
23a60a04 6706 tree real_part, imag_part;
cd3ce9b4
JM
6707 tree post = NULL_TREE;
6708
23a60a04
JM
6709 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6710 &post);
6711 /* Copy the value into a temporary, lest the formal temporary
6712 be reused out from under us. */
6713 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
cd3ce9b4
JM
6714 append_to_statement_list (post, pre_p);
6715
23a60a04
JM
6716 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6717 post_p);
cd3ce9b4 6718
47a25a46 6719 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
6720 }
6721 }
6722
23a60a04 6723 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
6724 }
6725
6726 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6727 f_fpr = TREE_CHAIN (f_gpr);
6728 f_res = TREE_CHAIN (f_fpr);
6729 f_ovf = TREE_CHAIN (f_res);
6730 f_sav = TREE_CHAIN (f_ovf);
6731
872a65b5 6732 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6733 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6734 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6735 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6736 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
cd3ce9b4
JM
6737
6738 size = int_size_in_bytes (type);
6739 rsize = (size + 3) / 4;
6740 align = 1;
6741
08b0dc1b 6742 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3
JJ
6743 && (TYPE_MODE (type) == SFmode
6744 || TYPE_MODE (type) == DFmode
7393f7f8 6745 || TYPE_MODE (type) == TFmode
e41b2a33 6746 || TYPE_MODE (type) == SDmode
7393f7f8
BE
6747 || TYPE_MODE (type) == DDmode
6748 || TYPE_MODE (type) == TDmode))
cd3ce9b4
JM
6749 {
6750 /* FP args go in FP registers, if present. */
cd3ce9b4 6751 reg = fpr;
602ea4d3 6752 n_reg = (size + 7) / 8;
cd3ce9b4
JM
6753 sav_ofs = 8*4;
6754 sav_scale = 8;
e41b2a33 6755 if (TYPE_MODE (type) != SFmode && TYPE_MODE (type) != SDmode)
cd3ce9b4
JM
6756 align = 8;
6757 }
6758 else
6759 {
6760 /* Otherwise into GP registers. */
cd3ce9b4
JM
6761 reg = gpr;
6762 n_reg = rsize;
6763 sav_ofs = 0;
6764 sav_scale = 4;
6765 if (n_reg == 2)
6766 align = 8;
6767 }
6768
6769 /* Pull the value out of the saved registers.... */
6770
6771 lab_over = NULL;
6772 addr = create_tmp_var (ptr_type_node, "addr");
6773 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
6774
6775 /* AltiVec vectors never go in registers when -mabi=altivec. */
6776 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
6777 align = 16;
6778 else
6779 {
6780 lab_false = create_artificial_label ();
6781 lab_over = create_artificial_label ();
6782
6783 /* Long long and SPE vectors are aligned in the registers.
6784		 As is any other 2-gpr item, such as complex int, due to a
6785 historical mistake. */
6786 u = reg;
602ea4d3 6787 if (n_reg == 2 && reg == gpr)
cd3ce9b4 6788 {
7393f7f8 6789 regalign = 1;
cd3ce9b4 6790 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
8fb632eb 6791 build_int_cst (TREE_TYPE (reg), n_reg - 1));
cd3ce9b4
JM
6792 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
6793 }
7393f7f8
BE
6794 /* _Decimal128 is passed in even/odd fpr pairs; the stored
6795 reg number is 0 for f1, so we want to make it odd. */
6796 else if (reg == fpr && TYPE_MODE (type) == TDmode)
6797 {
6798 regalign = 1;
6799 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), reg, size_int (1));
6800 u = build2 (MODIFY_EXPR, void_type_node, reg, t);
6801 }
cd3ce9b4 6802
95674810 6803 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
6804 t = build2 (GE_EXPR, boolean_type_node, u, t);
6805 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6806 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6807 gimplify_and_add (t, pre_p);
6808
6809 t = sav;
6810 if (sav_ofs)
5be014d5 6811 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 6812
8fb632eb
ZD
6813 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
6814 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
6815 u = fold_convert (sizetype, u);
6816 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
6817 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 6818
e41b2a33
PB
6819 /* _Decimal32 varargs are located in the second word of the 64-bit
6820 FP register for 32-bit binaries. */
6821 if (!TARGET_POWERPC64 && TYPE_MODE (type) == SDmode)
6822 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
6823
07beea0d 6824 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6825 gimplify_and_add (t, pre_p);
6826
6827 t = build1 (GOTO_EXPR, void_type_node, lab_over);
6828 gimplify_and_add (t, pre_p);
6829
6830 t = build1 (LABEL_EXPR, void_type_node, lab_false);
6831 append_to_statement_list (t, pre_p);
6832
7393f7f8 6833 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
6834 {
6835 /* Ensure that we don't find any more args in regs.
7393f7f8 6836	     Alignment has taken care of the special cases. */
07beea0d 6837 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (reg), reg, size_int (8));
cd3ce9b4
JM
6838 gimplify_and_add (t, pre_p);
6839 }
6840 }
6841
6842 /* ... otherwise out of the overflow area. */
6843
6844 /* Care for on-stack alignment if needed. */
6845 t = ovf;
6846 if (align != 1)
6847 {
5be014d5
AP
6848 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
6849 t = fold_convert (sizetype, t);
4a90aeeb 6850 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
6851 size_int (-align));
6852 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
6853 }
6854 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6855
07beea0d 6856 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6857 gimplify_and_add (u, pre_p);
6858
5be014d5 6859 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
07beea0d 6860 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
cd3ce9b4
JM
6861 gimplify_and_add (t, pre_p);
6862
6863 if (lab_over)
6864 {
6865 t = build1 (LABEL_EXPR, void_type_node, lab_over);
6866 append_to_statement_list (t, pre_p);
6867 }
6868
0cfbc62b
JM
6869 if (STRICT_ALIGNMENT
6870 && (TYPE_ALIGN (type)
6871 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
6872 {
6873 /* The value (of type complex double, for example) may not be
6874 aligned in memory in the saved registers, so copy via a
6875 temporary. (This is the same code as used for SPARC.) */
6876 tree tmp = create_tmp_var (type, "va_arg_tmp");
6877 tree dest_addr = build_fold_addr_expr (tmp);
6878
5039610b
SL
6879 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
6880 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
6881
6882 gimplify_and_add (copy, pre_p);
6883 addr = dest_addr;
6884 }
6885
08b0dc1b 6886 addr = fold_convert (ptrtype, addr);
872a65b5 6887 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
6888}
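/* Roughly, for a 4-byte 'int' argument under V.4 the trees built
   above amount to the following (a simplified sketch that ignores
   the FPR path and the alignment fixups):

     if (gpr < 8)
       addr = reg_save_area + gpr++ * 4;
     else
       {
         addr = overflow_arg_area;
         overflow_arg_area += 4;
       }
     result = *(int *) addr;
*/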
6889
0ac081f6
AH
6890/* Builtins. */
6891
58646b77
PB
6892static void
6893def_builtin (int mask, const char *name, tree type, int code)
6894{
96038623 6895 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
58646b77
PB
6896 {
6897 if (rs6000_builtin_decls[code])
6898 abort ();
6899
6900 rs6000_builtin_decls[code] =
c79efc4d
RÁE
6901 add_builtin_function (name, type, code, BUILT_IN_MD,
6902 NULL, NULL_TREE);
58646b77
PB
6903 }
6904}
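/* The bdesc_* tables below supply, for each builtin, the target mask
   that must be enabled, the insn code used to expand it, its source
   name and its enum value; the builtin initialization code later in
   this file walks these tables and calls def_builtin on each row.
   For example, the first bdesc_3arg entry registers
   __builtin_altivec_vmaddfp (expanded via CODE_FOR_altivec_vmaddfp)
   when MASK_ALTIVEC is set in target_flags.  */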
0ac081f6 6905
24408032
AH
6906/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
6907
2212663f 6908static const struct builtin_description bdesc_3arg[] =
24408032
AH
6909{
6910 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
6911 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
6912 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
6913 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
6914 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
6915 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
6916 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
6917 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
6918 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
6919 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 6920 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
6921 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
6922 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
6923 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
6924 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
6925 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
6926 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
6927 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
6928 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
6929 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
6930 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
6931 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
6932 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
6933
6934 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
6935 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
6936 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
6937 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
6938 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
6939 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
6940 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
6941 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
6942 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
6943 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
6944 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
6945 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
6946 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
6947 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
6948 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
96038623
DE
6949
6950 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
6951 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
6952 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
6953 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
6954 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
6955 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
6956 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
6957 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 6958 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 6959};
2212663f 6960
95385cbb
AH
6961/* DST operations: void foo (void *, const int, const char). */
6962
6963static const struct builtin_description bdesc_dst[] =
6964{
6965 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
6966 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
6967 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
6968 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
6969
6970 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
6971 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
6972 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
6973 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
6974};
6975
2212663f 6976/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 6977
a3170dc6 6978static struct builtin_description bdesc_2arg[] =
0ac081f6 6979{
f18c054f
DB
6980 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
6981 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
6982 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
6983 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
6984 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
6985 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
6986 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
6987 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
6988 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
6989 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
6990 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 6991 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 6992 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
6993 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
6994 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
6995 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
6996 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
6997 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
6998 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
6999 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
7000 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
7001 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
7002 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
7003 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
7004 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
7005 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
7006 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
7007 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
7008 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
7009 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
7010 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
7011 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
7012 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
7013 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
7014 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
7015 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
7016 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
7017 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
7018 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
7019 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
7020 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
7021 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
7022 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
7023 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
7024 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
7025 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
7026 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
7027 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
7028 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
7029 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
7030 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
7031 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
7032 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
7033 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
7034 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
7035 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
7036 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
7037 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
7038 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
7039 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
7040 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
7041 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
7042 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
7043 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 7044 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 7045 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
7046 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
7047 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
7048 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 7049 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
7050 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
7051 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
7052 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
7053 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
7054 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
7055 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
7056 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
7057 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
7058 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
7059 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
7060 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
7061 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
7062 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
7063 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
7064 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
7065 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3e0de9d1
DP
7066 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
7067 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
7068 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
7069 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
7070 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
7071 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
7072 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
7073 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
7074 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
7075 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
7076 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
7077 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
7078 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
7079 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
7080 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
7081 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
7082 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
7083 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
7084 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
7085 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
7086 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
7087 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
7088 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
7089 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 7090 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 7091
58646b77
PB
7092 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
7093 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
7094 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
7095 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
7096 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
7097 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
7098 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
7099 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
7100 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
7101 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
7102 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
7103 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
7104 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
7105 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
7106 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
7107 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
7108 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
7109 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
7110 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
7111 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
7112 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
7113 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
7114 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
7115 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
7116 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7117 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7118 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7119 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7120 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7121 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7122 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7123 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7124 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7125 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7126 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7127 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7128 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7129 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7130 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7131 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7132 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7133 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7134 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7135 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7136 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7137 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7138 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7139 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7140 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7141 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7142 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7143 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7144 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7145 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7146 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7147 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7148 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7149 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7150 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7151 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7152 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7153 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7154 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7155 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7156 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7157 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7158 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7159 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7160 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7161 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7162 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7163 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7164 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7165 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7166 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7167 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7168 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7169 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7170 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7171 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7172 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7173 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7174 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7175 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7176 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7177 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7178 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7179 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7180 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7181 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7182 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7183 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7184 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7185 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7186 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7187 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7188 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7189 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7190 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7191 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7192 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7193 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7194 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7195 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7196 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7197 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7198 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7199 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7200 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7201 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7202 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7203 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7204 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7205 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7206 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7207 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7208 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7209 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7210 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7211 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7212 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7213 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7214 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7215 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7216 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7217 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7218 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7219
96038623
DE
7220 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7221 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7222 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7223 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7224 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7225 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7226 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7227 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7228 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7229 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7230
a3170dc6
AH
7231 /* Placeholder; leave as the first SPE builtin. */
7232 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7233 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7234 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7235 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7236 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7237 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7238 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7239 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7240 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7241 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7242 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7243 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7244 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7245 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7246 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7247 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7248 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7249 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7250 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7251 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7252 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7253 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7254 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7255 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7256 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7257 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7258 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7259 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7260 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7261 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7262 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7263 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7264 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7265 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7266 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7267 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7268 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7269 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7270 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7271 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7272 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7273 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7274 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7275 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7276 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7277 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7278 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7279 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7280 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7281 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7282 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7283 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7284 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7285 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7286 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7287 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7288 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7289 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7290 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7291 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7292 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7293 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7294 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7295 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7296 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7297 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7298 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7299 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7300 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7301 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7302 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7303 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7304 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7305 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
7306 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7307 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
7308 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7309 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7310 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7311 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7312 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7313 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7314 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7315 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7316 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7317 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7318 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7319 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7320 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7321 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7322 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7323 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7324 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7325 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7326 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7327 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7328 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7329 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7330 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7331 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7332 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7333 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7334 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7335 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7336 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7337 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7338 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7339 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7340 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7341
7342 /* SPE binary operations expecting a 5-bit unsigned literal. */
7343 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7344
7345 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7346 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7347 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7348 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7349 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7350 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7351 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7352 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7353 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7354 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7355 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7356 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7357 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7358 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7359 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7360 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7361 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7362 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7363 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7364 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7365 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7366 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7367 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7368 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7369 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7370 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7371
7372 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7373 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
7374};
7375
7376/* AltiVec predicates. */
7377
7378struct builtin_description_predicates
7379{
7380 const unsigned int mask;
7381 const enum insn_code icode;
7382 const char *opcode;
7383 const char *const name;
7384 const enum rs6000_builtins code;
7385};
7386
7387static const struct builtin_description_predicates bdesc_altivec_preds[] =
7388{
7389 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7390 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7391 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7392 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7393 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7394 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7395 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7396 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7397 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7398 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7399 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7400 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
7401 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7402
7403 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7404 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7405 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7406};
24408032 7407
7408/* SPE predicates. */
7409static struct builtin_description bdesc_spe_predicates[] =
7410{
7411 /* Place-holder. Leave as first. */
7412 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7413 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7414 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7415 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7416 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7417 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7418 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7419 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7420 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7421 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7422 /* Place-holder. Leave as last. */
7423 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7424};
7425
7426/* SPE evsel predicates. */
7427static struct builtin_description bdesc_spe_evsel[] =
7428{
7429 /* Place-holder. Leave as first. */
7430 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7431 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7432 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7433 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7434 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7435 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7436 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7437 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7438 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7439 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7440 /* Place-holder. Leave as last. */
7441 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7442};
7443
7444/* PAIRED predicates. */
7445static const struct builtin_description bdesc_paired_preds[] =
7446{
7447 /* Place-holder. Leave as first. */
7448 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7449 /* Place-holder. Leave as last. */
7450 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7451};
7452
b6d08ca1 7453/* ABS* operations. */
7454
7455static const struct builtin_description bdesc_abs[] =
7456{
7457 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7458 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7459 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7460 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7461 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7462 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7463 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7464};
7465
7466/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7467 foo (VECa). */
24408032 7468
a3170dc6 7469static struct builtin_description bdesc_1arg[] =
2212663f 7470{
7471 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7472 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7473 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7474 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7475 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7476 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7477 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7478 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
7479 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7480 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7481 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
7482 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7483 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7484 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7485 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7486 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7487 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7488
7489 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7490 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7491 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7492 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7493 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7494 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7495 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7496 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7497 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7498 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7499 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7500 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7501 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7502 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7503 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7504 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7505 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7506 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7507 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7508
7509 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7510 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7511 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7512 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7513 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7514 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7515 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7516 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7517 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7518 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7519 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7520 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7521 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7522 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7523 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7524 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7525 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7526 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7527 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7528 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7529 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7530 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7531 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7532 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7533 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7534 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
7535 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7536 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7537 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7538 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
7539
7540 /* Place-holder. Leave as last unary SPE builtin. */
7541 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7542
7543 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7544 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7545 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7546 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7547 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
7548};
7549
7550static rtx
5039610b 7551rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
7552{
7553 rtx pat;
5039610b 7554 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7555 rtx op0 = expand_normal (arg0);
7556 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7557 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7558
7559 if (icode == CODE_FOR_nothing)
7560 /* Builtin not supported on this processor. */
7561 return 0;
7562
7563 /* If we got invalid arguments bail out before generating bad rtl. */
7564 if (arg0 == error_mark_node)
9a171fcd 7565 return const0_rtx;
20e26713 7566
7567 if (icode == CODE_FOR_altivec_vspltisb
7568 || icode == CODE_FOR_altivec_vspltish
7569 || icode == CODE_FOR_altivec_vspltisw
7570 || icode == CODE_FOR_spe_evsplatfi
7571 || icode == CODE_FOR_spe_evsplati)
7572 {
7573 /* Only allow 5-bit *signed* literals. */
b44140e7 7574 if (GET_CODE (op0) != CONST_INT
7575 || INTVAL (op0) > 15
7576 || INTVAL (op0) < -16)
7577 {
7578 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7579 return const0_rtx;
b44140e7 7580 }
7581 }
7582
c62f2db5 7583 if (target == 0
7584 || GET_MODE (target) != tmode
7585 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7586 target = gen_reg_rtx (tmode);
7587
7588 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7589 op0 = copy_to_mode_reg (mode0, op0);
7590
7591 pat = GEN_FCN (icode) (target, op0);
7592 if (! pat)
7593 return 0;
7594 emit_insn (pat);
0ac081f6 7595
7596 return target;
7597}
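/* Illustrative sketch, not part of the original sources: a minimal
   table-driven caller for the unary expander above, mirroring the way
   bdesc_1arg is walked elsewhere in this file.  The helper name
   example_expand_unop_table is hypothetical.  */
#if 0
static rtx
example_expand_unop_table (unsigned int fcode, tree exp, rtx target)
{
  const struct builtin_description *d = bdesc_1arg;
  size_t i;

  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      /* rs6000_expand_unop_builtin itself copes with CODE_FOR_nothing.  */
      return rs6000_expand_unop_builtin (d->icode, exp, target);

  return NULL_RTX;
}
#endif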
ae4b4a02 7598
100c4561 7599static rtx
5039610b 7600altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
7601{
7602 rtx pat, scratch1, scratch2;
5039610b 7603 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7604 rtx op0 = expand_normal (arg0);
7605 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7606 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7607
7608 /* If we have invalid arguments, bail out before generating bad rtl. */
7609 if (arg0 == error_mark_node)
9a171fcd 7610 return const0_rtx;
7611
7612 if (target == 0
7613 || GET_MODE (target) != tmode
7614 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7615 target = gen_reg_rtx (tmode);
7616
7617 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7618 op0 = copy_to_mode_reg (mode0, op0);
7619
7620 scratch1 = gen_reg_rtx (mode0);
7621 scratch2 = gen_reg_rtx (mode0);
7622
7623 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7624 if (! pat)
7625 return 0;
7626 emit_insn (pat);
7627
7628 return target;
7629}
7630
0ac081f6 7631static rtx
5039610b 7632rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
7633{
7634 rtx pat;
7635 tree arg0 = CALL_EXPR_ARG (exp, 0);
7636 tree arg1 = CALL_EXPR_ARG (exp, 1);
7637 rtx op0 = expand_normal (arg0);
7638 rtx op1 = expand_normal (arg1);
7639 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7640 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7641 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7642
7643 if (icode == CODE_FOR_nothing)
7644 /* Builtin not supported on this processor. */
7645 return 0;
7646
7647 /* If we got invalid arguments bail out before generating bad rtl. */
7648 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7649 return const0_rtx;
20e26713 7650
7651 if (icode == CODE_FOR_altivec_vcfux
7652 || icode == CODE_FOR_altivec_vcfsx
7653 || icode == CODE_FOR_altivec_vctsxs
7654 || icode == CODE_FOR_altivec_vctuxs
7655 || icode == CODE_FOR_altivec_vspltb
7656 || icode == CODE_FOR_altivec_vsplth
7657 || icode == CODE_FOR_altivec_vspltw
7658 || icode == CODE_FOR_spe_evaddiw
7659 || icode == CODE_FOR_spe_evldd
7660 || icode == CODE_FOR_spe_evldh
7661 || icode == CODE_FOR_spe_evldw
7662 || icode == CODE_FOR_spe_evlhhesplat
7663 || icode == CODE_FOR_spe_evlhhossplat
7664 || icode == CODE_FOR_spe_evlhhousplat
7665 || icode == CODE_FOR_spe_evlwhe
7666 || icode == CODE_FOR_spe_evlwhos
7667 || icode == CODE_FOR_spe_evlwhou
7668 || icode == CODE_FOR_spe_evlwhsplat
7669 || icode == CODE_FOR_spe_evlwwsplat
7670 || icode == CODE_FOR_spe_evrlwi
7671 || icode == CODE_FOR_spe_evslwi
7672 || icode == CODE_FOR_spe_evsrwis
f5119d10 7673 || icode == CODE_FOR_spe_evsubifw
0559cc77 7674 || icode == CODE_FOR_spe_evsrwiu)
7675 {
7676 /* Only allow 5-bit unsigned literals. */
8bb418a3 7677 STRIP_NOPS (arg1);
7678 if (TREE_CODE (arg1) != INTEGER_CST
7679 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7680 {
7681 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7682 return const0_rtx;
b44140e7 7683 }
7684 }
7685
c62f2db5 7686 if (target == 0
7687 || GET_MODE (target) != tmode
7688 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7689 target = gen_reg_rtx (tmode);
7690
7691 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7692 op0 = copy_to_mode_reg (mode0, op0);
7693 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7694 op1 = copy_to_mode_reg (mode1, op1);
7695
7696 pat = GEN_FCN (icode) (target, op0, op1);
7697 if (! pat)
7698 return 0;
7699 emit_insn (pat);
7700
7701 return target;
7702}
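/* Reading aid, not from the original sources: the 5-bit check above is what
   turns a call whose literal operand cannot be encoded, for example

     __builtin_altivec_vspltw (v, 42);

   into the "argument 2 must be a 5-bit unsigned literal" diagnostic, while
   const0_rtx is returned so that no bad rtl is generated.  */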
6525c0e7 7703
ae4b4a02 7704static rtx
f676971a 7705altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7706 tree exp, rtx target)
7707{
7708 rtx pat, scratch;
7709 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7710 tree arg0 = CALL_EXPR_ARG (exp, 1);
7711 tree arg1 = CALL_EXPR_ARG (exp, 2);
7712 rtx op0 = expand_normal (arg0);
7713 rtx op1 = expand_normal (arg1);
7714 enum machine_mode tmode = SImode;
7715 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7716 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7717 int cr6_form_int;
7718
7719 if (TREE_CODE (cr6_form) != INTEGER_CST)
7720 {
7721 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 7722 return const0_rtx;
7723 }
7724 else
7725 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
7726
37409796 7727 gcc_assert (mode0 == mode1);
7728
7729 /* If we have invalid arguments, bail out before generating bad rtl. */
7730 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7731 return const0_rtx;
7732
7733 if (target == 0
7734 || GET_MODE (target) != tmode
7735 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7736 target = gen_reg_rtx (tmode);
7737
7738 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7739 op0 = copy_to_mode_reg (mode0, op0);
7740 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7741 op1 = copy_to_mode_reg (mode1, op1);
7742
7743 scratch = gen_reg_rtx (mode0);
7744
7745 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 7746 gen_rtx_SYMBOL_REF (Pmode, opcode));
7747 if (! pat)
7748 return 0;
7749 emit_insn (pat);
7750
7751 /* The vec_any* and vec_all* predicates use the same opcodes for two
7752 different operations, but the bits in CR6 will be different
7753 depending on what information we want. So we have to play tricks
7754 with CR6 to get the right bits out.
7755
7756 If you think this is disgusting, look at the specs for the
7757 AltiVec predicates. */
7758
7759 switch (cr6_form_int)
7760 {
7761 case 0:
7762 emit_insn (gen_cr6_test_for_zero (target));
7763 break;
7764 case 1:
7765 emit_insn (gen_cr6_test_for_zero_reverse (target));
7766 break;
7767 case 2:
7768 emit_insn (gen_cr6_test_for_lt (target));
7769 break;
7770 case 3:
7771 emit_insn (gen_cr6_test_for_lt_reverse (target));
7772 break;
7773 default:
7774 error ("argument 1 of __builtin_altivec_predicate is out of range");
7775 break;
7776 }
7777
7778 return target;
7779}
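/* Reading aid, not from the original sources: a hypothetical caller passes
   the CR6 selector as the leading constant argument, e.g.

     int all_equal = __builtin_altivec_vcmpequw_p (2, a, b);

   Forms 0 and 1 select the CR6 "zero" test and its reverse, forms 2 and 3
   the "lt" test and its reverse, matching the gen_cr6_test_for_* cases
   above.  In practice the selector comes from the <altivec.h> vec_all_* and
   vec_any_* macros rather than being written by hand.  */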
7780
7781static rtx
7782paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
7783{
7784 rtx pat, addr;
7785 tree arg0 = CALL_EXPR_ARG (exp, 0);
7786 tree arg1 = CALL_EXPR_ARG (exp, 1);
7787 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7788 enum machine_mode mode0 = Pmode;
7789 enum machine_mode mode1 = Pmode;
7790 rtx op0 = expand_normal (arg0);
7791 rtx op1 = expand_normal (arg1);
7792
7793 if (icode == CODE_FOR_nothing)
7794 /* Builtin not supported on this processor. */
7795 return 0;
7796
7797 /* If we got invalid arguments bail out before generating bad rtl. */
7798 if (arg0 == error_mark_node || arg1 == error_mark_node)
7799 return const0_rtx;
7800
7801 if (target == 0
7802 || GET_MODE (target) != tmode
7803 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7804 target = gen_reg_rtx (tmode);
7805
7806 op1 = copy_to_mode_reg (mode1, op1);
7807
7808 if (op0 == const0_rtx)
7809 {
7810 addr = gen_rtx_MEM (tmode, op1);
7811 }
7812 else
7813 {
7814 op0 = copy_to_mode_reg (mode0, op0);
7815 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7816 }
7817
7818 pat = GEN_FCN (icode) (target, addr);
7819
7820 if (! pat)
7821 return 0;
7822 emit_insn (pat);
7823
7824 return target;
7825}
7826
b4a62fa0 7827static rtx
5039610b 7828altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
7829{
7830 rtx pat, addr;
7831 tree arg0 = CALL_EXPR_ARG (exp, 0);
7832 tree arg1 = CALL_EXPR_ARG (exp, 1);
7833 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7834 enum machine_mode mode0 = Pmode;
7835 enum machine_mode mode1 = Pmode;
7836 rtx op0 = expand_normal (arg0);
7837 rtx op1 = expand_normal (arg1);
7838
7839 if (icode == CODE_FOR_nothing)
7840 /* Builtin not supported on this processor. */
7841 return 0;
7842
7843 /* If we got invalid arguments bail out before generating bad rtl. */
7844 if (arg0 == error_mark_node || arg1 == error_mark_node)
7845 return const0_rtx;
7846
7847 if (target == 0
7848 || GET_MODE (target) != tmode
7849 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7850 target = gen_reg_rtx (tmode);
7851
f676971a 7852 op1 = copy_to_mode_reg (mode1, op1);
7853
7854 if (op0 == const0_rtx)
7855 {
7856 addr = gen_rtx_MEM (tmode, op1);
7857 }
7858 else
7859 {
7860 op0 = copy_to_mode_reg (mode0, op0);
7861 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7862 }
7863
7864 pat = GEN_FCN (icode) (target, addr);
7865
7866 if (! pat)
7867 return 0;
7868 emit_insn (pat);
7869
7870 return target;
7871}
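/* Illustrative note, not from the original sources: the operands follow the
   (offset, base) shape of the vec_ld-style builtins, so conceptually

     __builtin_altivec_lvx (off, ptr)

   loads from (mem (plus off ptr)) when OFF is nonzero and from a plain
   (mem ptr) when OFF folds to zero; that MEM is what the lvx pattern
   consumes.  */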
7872
61bea3b0 7873static rtx
5039610b 7874spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 7875{
7876 tree arg0 = CALL_EXPR_ARG (exp, 0);
7877 tree arg1 = CALL_EXPR_ARG (exp, 1);
7878 tree arg2 = CALL_EXPR_ARG (exp, 2);
7879 rtx op0 = expand_normal (arg0);
7880 rtx op1 = expand_normal (arg1);
7881 rtx op2 = expand_normal (arg2);
7882 rtx pat;
7883 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
7884 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
7885 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
7886
7887 /* Invalid arguments. Bail before doing anything stoopid! */
7888 if (arg0 == error_mark_node
7889 || arg1 == error_mark_node
7890 || arg2 == error_mark_node)
7891 return const0_rtx;
7892
7893 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
7894 op0 = copy_to_mode_reg (mode2, op0);
7895 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7896 op1 = copy_to_mode_reg (mode0, op1);
7897 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7898 op2 = copy_to_mode_reg (mode1, op2);
7899
7900 pat = GEN_FCN (icode) (op1, op2, op0);
7901 if (pat)
7902 emit_insn (pat);
7903 return NULL_RTX;
7904}
7905
7906static rtx
7907paired_expand_stv_builtin (enum insn_code icode, tree exp)
7908{
7909 tree arg0 = CALL_EXPR_ARG (exp, 0);
7910 tree arg1 = CALL_EXPR_ARG (exp, 1);
7911 tree arg2 = CALL_EXPR_ARG (exp, 2);
7912 rtx op0 = expand_normal (arg0);
7913 rtx op1 = expand_normal (arg1);
7914 rtx op2 = expand_normal (arg2);
7915 rtx pat, addr;
7916 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7917 enum machine_mode mode1 = Pmode;
7918 enum machine_mode mode2 = Pmode;
7919
7920 /* Invalid arguments. Bail before doing anything stoopid! */
7921 if (arg0 == error_mark_node
7922 || arg1 == error_mark_node
7923 || arg2 == error_mark_node)
7924 return const0_rtx;
7925
7926 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7927 op0 = copy_to_mode_reg (tmode, op0);
7928
7929 op2 = copy_to_mode_reg (mode2, op2);
7930
7931 if (op1 == const0_rtx)
7932 {
7933 addr = gen_rtx_MEM (tmode, op2);
7934 }
7935 else
7936 {
7937 op1 = copy_to_mode_reg (mode1, op1);
7938 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7939 }
7940
7941 pat = GEN_FCN (icode) (addr, op0);
7942 if (pat)
7943 emit_insn (pat);
7944 return NULL_RTX;
7945}
7946
6525c0e7 7947static rtx
5039610b 7948altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 7949{
7950 tree arg0 = CALL_EXPR_ARG (exp, 0);
7951 tree arg1 = CALL_EXPR_ARG (exp, 1);
7952 tree arg2 = CALL_EXPR_ARG (exp, 2);
7953 rtx op0 = expand_normal (arg0);
7954 rtx op1 = expand_normal (arg1);
7955 rtx op2 = expand_normal (arg2);
7956 rtx pat, addr;
7957 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7958 enum machine_mode mode1 = Pmode;
7959 enum machine_mode mode2 = Pmode;
7960
7961 /* Invalid arguments. Bail before doing anything stoopid! */
7962 if (arg0 == error_mark_node
7963 || arg1 == error_mark_node
7964 || arg2 == error_mark_node)
9a171fcd 7965 return const0_rtx;
6525c0e7 7966
7967 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7968 op0 = copy_to_mode_reg (tmode, op0);
7969
f676971a 7970 op2 = copy_to_mode_reg (mode2, op2);
7971
7972 if (op1 == const0_rtx)
7973 {
7974 addr = gen_rtx_MEM (tmode, op2);
7975 }
7976 else
7977 {
7978 op1 = copy_to_mode_reg (mode1, op1);
7979 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7980 }
6525c0e7 7981
b4a62fa0 7982 pat = GEN_FCN (icode) (addr, op0);
7983 if (pat)
7984 emit_insn (pat);
7985 return NULL_RTX;
7986}
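/* Illustrative note, not from the original sources: stores mirror the load
   helpers above but take (value, offset, base), so conceptually

     __builtin_altivec_stvx (v, off, ptr)

   writes V to (mem (plus off ptr)), with the PLUS dropped when OFF folds
   to zero.  */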
7987
2212663f 7988static rtx
5039610b 7989rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
7990{
7991 rtx pat;
7992 tree arg0 = CALL_EXPR_ARG (exp, 0);
7993 tree arg1 = CALL_EXPR_ARG (exp, 1);
7994 tree arg2 = CALL_EXPR_ARG (exp, 2);
7995 rtx op0 = expand_normal (arg0);
7996 rtx op1 = expand_normal (arg1);
7997 rtx op2 = expand_normal (arg2);
7998 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7999 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8000 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8001 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 8002
8003 if (icode == CODE_FOR_nothing)
8004 /* Builtin not supported on this processor. */
8005 return 0;
8006
8007 /* If we got invalid arguments bail out before generating bad rtl. */
8008 if (arg0 == error_mark_node
8009 || arg1 == error_mark_node
8010 || arg2 == error_mark_node)
9a171fcd 8011 return const0_rtx;
20e26713 8012
8013 if (icode == CODE_FOR_altivec_vsldoi_v4sf
8014 || icode == CODE_FOR_altivec_vsldoi_v4si
8015 || icode == CODE_FOR_altivec_vsldoi_v8hi
8016 || icode == CODE_FOR_altivec_vsldoi_v16qi)
8017 {
8018 /* Only allow 4-bit unsigned literals. */
8bb418a3 8019 STRIP_NOPS (arg2);
8020 if (TREE_CODE (arg2) != INTEGER_CST
8021 || TREE_INT_CST_LOW (arg2) & ~0xf)
8022 {
8023 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 8024 return const0_rtx;
b44140e7 8025 }
8026 }
8027
c62f2db5 8028 if (target == 0
8029 || GET_MODE (target) != tmode
8030 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8031 target = gen_reg_rtx (tmode);
8032
8033 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8034 op0 = copy_to_mode_reg (mode0, op0);
8035 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8036 op1 = copy_to_mode_reg (mode1, op1);
8037 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
8038 op2 = copy_to_mode_reg (mode2, op2);
8039
8040 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
8041 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
8042 else
8043 pat = GEN_FCN (icode) (target, op0, op1, op2);
8044 if (! pat)
8045 return 0;
8046 emit_insn (pat);
8047
8048 return target;
8049}
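/* Reading aid, not from the original sources: vec_sld is the usual way the
   4-bit check above is reached.  For example

     r = vec_sld (a, b, 3);

   is accepted, while a shift count that is not a compile-time constant in
   the range 0..15 draws the "4-bit unsigned literal" error instead of
   generating rtl.  */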
92898235 8050
3a9b8c7e 8051/* Expand the lvx builtins. */
0ac081f6 8052static rtx
a2369ed3 8053altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 8054{
5039610b 8055 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 8056 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8057 tree arg0;
8058 enum machine_mode tmode, mode0;
7c3abc73 8059 rtx pat, op0;
3a9b8c7e 8060 enum insn_code icode;
92898235 8061
8062 switch (fcode)
8063 {
f18c054f 8064 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 8065 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 8066 break;
f18c054f 8067 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 8068 icode = CODE_FOR_altivec_lvx_v8hi;
8069 break;
8070 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 8071 icode = CODE_FOR_altivec_lvx_v4si;
8072 break;
8073 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 8074 icode = CODE_FOR_altivec_lvx_v4sf;
8075 break;
8076 default:
8077 *expandedp = false;
8078 return NULL_RTX;
8079 }
0ac081f6 8080
3a9b8c7e 8081 *expandedp = true;
f18c054f 8082
5039610b 8083 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8084 op0 = expand_normal (arg0);
8085 tmode = insn_data[icode].operand[0].mode;
8086 mode0 = insn_data[icode].operand[1].mode;
f18c054f 8087
8088 if (target == 0
8089 || GET_MODE (target) != tmode
8090 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8091 target = gen_reg_rtx (tmode);
24408032 8092
8093 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8094 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 8095
8096 pat = GEN_FCN (icode) (target, op0);
8097 if (! pat)
8098 return 0;
8099 emit_insn (pat);
8100 return target;
8101}
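/* Illustrative note, not from the original sources: each LD_INTERNAL
   variant takes a single pointer argument.  The expander forces that
   pointer into a register and wraps it in a MEM of the vector mode, so the
   V4SI variant conceptually loads TARGET from (mem:V4SI (reg ptr)) via
   CODE_FOR_altivec_lvx_v4si.  */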
f18c054f 8102
8103/* Expand the stvx builtins. */
8104static rtx
f676971a 8105altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8106 bool *expandedp)
3a9b8c7e 8107{
5039610b 8108 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8109 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8110 tree arg0, arg1;
8111 enum machine_mode mode0, mode1;
7c3abc73 8112 rtx pat, op0, op1;
3a9b8c7e 8113 enum insn_code icode;
f18c054f 8114
8115 switch (fcode)
8116 {
8117 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 8118 icode = CODE_FOR_altivec_stvx_v16qi;
8119 break;
8120 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 8121 icode = CODE_FOR_altivec_stvx_v8hi;
8122 break;
8123 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 8124 icode = CODE_FOR_altivec_stvx_v4si;
8125 break;
8126 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 8127 icode = CODE_FOR_altivec_stvx_v4sf;
8128 break;
8129 default:
8130 *expandedp = false;
8131 return NULL_RTX;
8132 }
24408032 8133
8134 arg0 = CALL_EXPR_ARG (exp, 0);
8135 arg1 = CALL_EXPR_ARG (exp, 1);
8136 op0 = expand_normal (arg0);
8137 op1 = expand_normal (arg1);
8138 mode0 = insn_data[icode].operand[0].mode;
8139 mode1 = insn_data[icode].operand[1].mode;
f18c054f 8140
8141 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8142 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8143 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8144 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 8145
8146 pat = GEN_FCN (icode) (op0, op1);
8147 if (pat)
8148 emit_insn (pat);
f18c054f 8149
8150 *expandedp = true;
8151 return NULL_RTX;
8152}
f18c054f 8153
8154/* Expand the dst builtins. */
8155static rtx
f676971a 8156altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8157 bool *expandedp)
3a9b8c7e 8158{
5039610b 8159 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8160 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8161 tree arg0, arg1, arg2;
8162 enum machine_mode mode0, mode1, mode2;
7c3abc73 8163 rtx pat, op0, op1, op2;
586de218 8164 const struct builtin_description *d;
a3170dc6 8165 size_t i;
f18c054f 8166
3a9b8c7e 8167 *expandedp = false;
f18c054f 8168
3a9b8c7e 8169 /* Handle DST variants. */
586de218 8170 d = bdesc_dst;
8171 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8172 if (d->code == fcode)
8173 {
8174 arg0 = CALL_EXPR_ARG (exp, 0);
8175 arg1 = CALL_EXPR_ARG (exp, 1);
8176 arg2 = CALL_EXPR_ARG (exp, 2);
8177 op0 = expand_normal (arg0);
8178 op1 = expand_normal (arg1);
8179 op2 = expand_normal (arg2);
8180 mode0 = insn_data[d->icode].operand[0].mode;
8181 mode1 = insn_data[d->icode].operand[1].mode;
8182 mode2 = insn_data[d->icode].operand[2].mode;
24408032 8183
8184 /* Invalid arguments, bail out before generating bad rtl. */
8185 if (arg0 == error_mark_node
8186 || arg1 == error_mark_node
8187 || arg2 == error_mark_node)
8188 return const0_rtx;
f18c054f 8189
86e7df90 8190 *expandedp = true;
8bb418a3 8191 STRIP_NOPS (arg2);
8192 if (TREE_CODE (arg2) != INTEGER_CST
8193 || TREE_INT_CST_LOW (arg2) & ~0x3)
8194 {
9e637a26 8195 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
8196 return const0_rtx;
8197 }
f18c054f 8198
3a9b8c7e 8199 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 8200 op0 = copy_to_mode_reg (Pmode, op0);
8201 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8202 op1 = copy_to_mode_reg (mode1, op1);
24408032 8203
8204 pat = GEN_FCN (d->icode) (op0, op1, op2);
8205 if (pat != 0)
8206 emit_insn (pat);
f18c054f 8207
8208 return NULL_RTX;
8209 }
f18c054f 8210
8211 return NULL_RTX;
8212}
24408032 8213
8214/* Expand vec_init builtin. */
8215static rtx
5039610b 8216altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
8217{
8218 enum machine_mode tmode = TYPE_MODE (type);
8219 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8220 int i, n_elt = GET_MODE_NUNITS (tmode);
8221 rtvec v = rtvec_alloc (n_elt);
8222
8223 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 8224 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8225
5039610b 8226 for (i = 0; i < n_elt; ++i)
7a4eca66 8227 {
5039610b 8228 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
8229 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8230 }
8231
8232 if (!target || !register_operand (target, tmode))
8233 target = gen_reg_rtx (tmode);
8234
8235 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8236 return target;
8237}
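/* Worked example, not from the original sources: a hypothetical call with
   four arguments to the V4SI variant, say { 1, 2, 3, 4 }, gives N_ELT == 4
   here; each argument is narrowed to SImode, collected into the rtvec V,
   and handed to rs6000_expand_vector_init as a PARALLEL.  */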
8238
8239/* Return the integer constant in ARG. Constrain it to be in the range
8240 of the subparts of VEC_TYPE; issue an error if not. */
8241
8242static int
8243get_element_number (tree vec_type, tree arg)
8244{
8245 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8246
8247 if (!host_integerp (arg, 1)
8248 || (elt = tree_low_cst (arg, 1), elt > max))
8249 {
8250 error ("selector must be an integer constant in the range 0..%wi", max);
8251 return 0;
8252 }
8253
8254 return elt;
8255}
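/* Worked example, not from the original sources: for a V8HI vector type
   TYPE_VECTOR_SUBPARTS is 8, so MAX is 7; a selector of 9, or one that is
   not a compile-time constant, draws the "selector must be an integer
   constant in the range 0..7" error and falls back to element 0.  */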
8256
8257/* Expand vec_set builtin. */
8258static rtx
5039610b 8259altivec_expand_vec_set_builtin (tree exp)
8260{
8261 enum machine_mode tmode, mode1;
8262 tree arg0, arg1, arg2;
8263 int elt;
8264 rtx op0, op1;
8265
8266 arg0 = CALL_EXPR_ARG (exp, 0);
8267 arg1 = CALL_EXPR_ARG (exp, 1);
8268 arg2 = CALL_EXPR_ARG (exp, 2);
8269
8270 tmode = TYPE_MODE (TREE_TYPE (arg0));
8271 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8272 gcc_assert (VECTOR_MODE_P (tmode));
8273
8274 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
8275 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
8276 elt = get_element_number (TREE_TYPE (arg0), arg2);
8277
8278 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8279 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8280
8281 op0 = force_reg (tmode, op0);
8282 op1 = force_reg (mode1, op1);
8283
8284 rs6000_expand_vector_set (op0, op1, elt);
8285
8286 return op0;
8287}
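/* Worked example, not from the original sources: a vec_insert-style use,
   conceptually

     v = vec_insert (x, v, 2);

   reaches this point with ARG0 = v, ARG1 = x and ARG2 = 2, so element 2 of
   the vector is overwritten via rs6000_expand_vector_set and the updated
   vector is returned in OP0.  */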
8288
8289/* Expand vec_ext builtin. */
8290static rtx
5039610b 8291altivec_expand_vec_ext_builtin (tree exp, rtx target)
8292{
8293 enum machine_mode tmode, mode0;
8294 tree arg0, arg1;
8295 int elt;
8296 rtx op0;
8297
8298 arg0 = CALL_EXPR_ARG (exp, 0);
8299 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8300
84217346 8301 op0 = expand_normal (arg0);
8302 elt = get_element_number (TREE_TYPE (arg0), arg1);
8303
8304 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8305 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8306 gcc_assert (VECTOR_MODE_P (mode0));
8307
8308 op0 = force_reg (mode0, op0);
8309
8310 if (optimize || !target || !register_operand (target, tmode))
8311 target = gen_reg_rtx (tmode);
8312
8313 rs6000_expand_vector_extract (target, op0, elt);
8314
8315 return target;
8316}
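/* Worked example, not from the original sources: a vec_extract-style use,
   conceptually

     short s = vec_extract (vh, 5);

   arrives with ARG0 = vh and ARG1 = 5; get_element_number validates the
   selector against 0..7 for V8HI and rs6000_expand_vector_extract then
   copies element 5 into TARGET.  */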
8317
8318/* Expand the builtin in EXP and store the result in TARGET. Store
8319 true in *EXPANDEDP if we found a builtin to expand. */
8320static rtx
a2369ed3 8321altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8322{
8323 const struct builtin_description *d;
8324 const struct builtin_description_predicates *dp;
8325 size_t i;
8326 enum insn_code icode;
5039610b 8327 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8328 tree arg0;
8329 rtx op0, pat;
8330 enum machine_mode tmode, mode0;
3a9b8c7e 8331 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8332
8333 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8334 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8335 {
8336 *expandedp = true;
ea40ba9c 8337 error ("unresolved overload for Altivec builtin %qF", fndecl);
8338 return const0_rtx;
8339 }
8340
8341 target = altivec_expand_ld_builtin (exp, target, expandedp);
8342 if (*expandedp)
8343 return target;
0ac081f6 8344
8345 target = altivec_expand_st_builtin (exp, target, expandedp);
8346 if (*expandedp)
8347 return target;
8348
8349 target = altivec_expand_dst_builtin (exp, target, expandedp);
8350 if (*expandedp)
8351 return target;
8352
8353 *expandedp = true;
95385cbb 8354
8355 switch (fcode)
8356 {
6525c0e7 8357 case ALTIVEC_BUILTIN_STVX:
5039610b 8358 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8359 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8360 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8361 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8362 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8363 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8364 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8365 case ALTIVEC_BUILTIN_STVXL:
5039610b 8366 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8367
8368 case ALTIVEC_BUILTIN_MFVSCR:
8369 icode = CODE_FOR_altivec_mfvscr;
8370 tmode = insn_data[icode].operand[0].mode;
8371
8372 if (target == 0
8373 || GET_MODE (target) != tmode
8374 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8375 target = gen_reg_rtx (tmode);
f676971a 8376
95385cbb 8377 pat = GEN_FCN (icode) (target);
8378 if (! pat)
8379 return 0;
8380 emit_insn (pat);
8381 return target;
8382
8383 case ALTIVEC_BUILTIN_MTVSCR:
8384 icode = CODE_FOR_altivec_mtvscr;
5039610b 8385 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8386 op0 = expand_normal (arg0);
8387 mode0 = insn_data[icode].operand[0].mode;
8388
8389 /* If we got invalid arguments bail out before generating bad rtl. */
8390 if (arg0 == error_mark_node)
9a171fcd 8391 return const0_rtx;
8392
8393 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8394 op0 = copy_to_mode_reg (mode0, op0);
8395
8396 pat = GEN_FCN (icode) (op0);
8397 if (pat)
8398 emit_insn (pat);
8399 return NULL_RTX;
3a9b8c7e 8400
8401 case ALTIVEC_BUILTIN_DSSALL:
8402 emit_insn (gen_altivec_dssall ());
8403 return NULL_RTX;
8404
8405 case ALTIVEC_BUILTIN_DSS:
8406 icode = CODE_FOR_altivec_dss;
5039610b 8407 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8408 STRIP_NOPS (arg0);
84217346 8409 op0 = expand_normal (arg0);
95385cbb
AH
8410 mode0 = insn_data[icode].operand[0].mode;
8411
8412 /* If we got invalid arguments bail out before generating bad rtl. */
8413 if (arg0 == error_mark_node)
9a171fcd 8414 return const0_rtx;
95385cbb 8415
8416 if (TREE_CODE (arg0) != INTEGER_CST
8417 || TREE_INT_CST_LOW (arg0) & ~0x3)
8418 {
8419 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8420 return const0_rtx;
8421 }
8422
8423 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8424 op0 = copy_to_mode_reg (mode0, op0);
8425
8426 emit_insn (gen_altivec_dss (op0));
0ac081f6 8427 return NULL_RTX;
8428
8429 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8430 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8431 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8432 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8433 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
8434
8435 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8436 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8437 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8438 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8439 return altivec_expand_vec_set_builtin (exp);
8440
8441 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8442 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8443 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8444 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8445 return altivec_expand_vec_ext_builtin (exp, target);
8446
8447 default:
8448 break;
8449 /* Fall through. */
0ac081f6 8450 }
24408032 8451
100c4561 8452 /* Expand abs* operations. */
586de218 8453 d = bdesc_abs;
ca7558fc 8454 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8455 if (d->code == fcode)
5039610b 8456 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8457
ae4b4a02 8458 /* Expand the AltiVec predicates. */
586de218 8459 dp = bdesc_altivec_preds;
ca7558fc 8460 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8461 if (dp->code == fcode)
c4ad648e 8462 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8463 exp, target);
ae4b4a02 8464
8465 /* LV* are funky. We initialized them differently. */
8466 switch (fcode)
8467 {
8468 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8469 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5039610b 8470 exp, target);
6525c0e7 8471 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8472 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5039610b 8473 exp, target);
6525c0e7 8474 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8475 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5039610b 8476 exp, target);
6525c0e7 8477 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8478 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5039610b 8479 exp, target);
6525c0e7 8480 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8481 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5039610b 8482 exp, target);
6525c0e7 8483 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8484 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5039610b 8485 exp, target);
6525c0e7 8486 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8487 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5039610b 8488 exp, target);
8489 default:
8490 break;
8491 /* Fall through. */
8492 }
95385cbb 8493
92898235 8494 *expandedp = false;
8495 return NULL_RTX;
8496}
8497
8498/* Expand the builtin in EXP and store the result in TARGET. Store
8499 true in *EXPANDEDP if we found a builtin to expand. */
8500static rtx
8501paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8502{
8503 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8504 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 8505 const struct builtin_description *d;
8506 size_t i;
8507
8508 *expandedp = true;
8509
8510 switch (fcode)
8511 {
8512 case PAIRED_BUILTIN_STX:
8513 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8514 case PAIRED_BUILTIN_LX:
8515 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8516 default:
8517 break;
8518 /* Fall through. */
8519 }
8520
8521 /* Expand the paired predicates. */
23a651fc 8522 d = bdesc_paired_preds;
8523 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8524 if (d->code == fcode)
8525 return paired_expand_predicate_builtin (d->icode, exp, target);
8526
8527 *expandedp = false;
8528 return NULL_RTX;
8529}
8530
8531/* Binops that need to be initialized manually, but can be expanded
8532 automagically by rs6000_expand_binop_builtin. */
8533static struct builtin_description bdesc_2arg_spe[] =
8534{
8535 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8536 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8537 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8538 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8539 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8540 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8541 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8542 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8543 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8544 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8545 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8546 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8547 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8548 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8549 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8550 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8551 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8552 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8553 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8554 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8555 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8556 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8557};
8558
8559/* Expand the builtin in EXP and store the result in TARGET. Store
8560 true in *EXPANDEDP if we found a builtin to expand.
8561
8562 This expands the SPE builtins that are not simple unary and binary
8563 operations. */
8564static rtx
a2369ed3 8565spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8566{
5039610b 8567 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8568 tree arg1, arg0;
8569 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8570 enum insn_code icode;
8571 enum machine_mode tmode, mode0;
8572 rtx pat, op0;
8573 struct builtin_description *d;
8574 size_t i;
8575
8576 *expandedp = true;
8577
8578 /* Syntax check for a 5-bit unsigned immediate. */
8579 switch (fcode)
8580 {
8581 case SPE_BUILTIN_EVSTDD:
8582 case SPE_BUILTIN_EVSTDH:
8583 case SPE_BUILTIN_EVSTDW:
8584 case SPE_BUILTIN_EVSTWHE:
8585 case SPE_BUILTIN_EVSTWHO:
8586 case SPE_BUILTIN_EVSTWWE:
8587 case SPE_BUILTIN_EVSTWWO:
5039610b 8588 arg1 = CALL_EXPR_ARG (exp, 2);
8589 if (TREE_CODE (arg1) != INTEGER_CST
8590 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8591 {
8592 error ("argument 2 must be a 5-bit unsigned literal");
8593 return const0_rtx;
8594 }
8595 break;
8596 default:
8597 break;
8598 }
8599
8600 /* The evsplat*i instructions are not quite generic. */
8601 switch (fcode)
8602 {
8603 case SPE_BUILTIN_EVSPLATFI:
8604 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8605 exp, target);
8606 case SPE_BUILTIN_EVSPLATI:
8607 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8608 exp, target);
8609 default:
8610 break;
8611 }
8612
8613 d = (struct builtin_description *) bdesc_2arg_spe;
8614 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8615 if (d->code == fcode)
5039610b 8616 return rs6000_expand_binop_builtin (d->icode, exp, target);
8617
8618 d = (struct builtin_description *) bdesc_spe_predicates;
8619 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8620 if (d->code == fcode)
5039610b 8621 return spe_expand_predicate_builtin (d->icode, exp, target);
8622
8623 d = (struct builtin_description *) bdesc_spe_evsel;
8624 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8625 if (d->code == fcode)
5039610b 8626 return spe_expand_evsel_builtin (d->icode, exp, target);
8627
8628 switch (fcode)
8629 {
8630 case SPE_BUILTIN_EVSTDDX:
5039610b 8631 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8632 case SPE_BUILTIN_EVSTDHX:
5039610b 8633 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8634 case SPE_BUILTIN_EVSTDWX:
5039610b 8635 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8636 case SPE_BUILTIN_EVSTWHEX:
5039610b 8637 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8638 case SPE_BUILTIN_EVSTWHOX:
5039610b 8639 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8640 case SPE_BUILTIN_EVSTWWEX:
5039610b 8641 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8642 case SPE_BUILTIN_EVSTWWOX:
5039610b 8643 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8644 case SPE_BUILTIN_EVSTDD:
5039610b 8645 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8646 case SPE_BUILTIN_EVSTDH:
5039610b 8647 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8648 case SPE_BUILTIN_EVSTDW:
5039610b 8649 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8650 case SPE_BUILTIN_EVSTWHE:
5039610b 8651 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8652 case SPE_BUILTIN_EVSTWHO:
5039610b 8653 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8654 case SPE_BUILTIN_EVSTWWE:
5039610b 8655 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8656 case SPE_BUILTIN_EVSTWWO:
5039610b 8657 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
8658 case SPE_BUILTIN_MFSPEFSCR:
8659 icode = CODE_FOR_spe_mfspefscr;
8660 tmode = insn_data[icode].operand[0].mode;
8661
8662 if (target == 0
8663 || GET_MODE (target) != tmode
8664 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8665 target = gen_reg_rtx (tmode);
f676971a 8666
8667 pat = GEN_FCN (icode) (target);
8668 if (! pat)
8669 return 0;
8670 emit_insn (pat);
8671 return target;
8672 case SPE_BUILTIN_MTSPEFSCR:
8673 icode = CODE_FOR_spe_mtspefscr;
5039610b 8674 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8675 op0 = expand_normal (arg0);
8676 mode0 = insn_data[icode].operand[0].mode;
8677
8678 if (arg0 == error_mark_node)
8679 return const0_rtx;
8680
8681 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8682 op0 = copy_to_mode_reg (mode0, op0);
8683
8684 pat = GEN_FCN (icode) (op0);
8685 if (pat)
8686 emit_insn (pat);
8687 return NULL_RTX;
8688 default:
8689 break;
8690 }
8691
8692 *expandedp = false;
8693 return NULL_RTX;
8694}
8695
8696static rtx
8697paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
8698{
8699 rtx pat, scratch, tmp;
8700 tree form = CALL_EXPR_ARG (exp, 0);
8701 tree arg0 = CALL_EXPR_ARG (exp, 1);
8702 tree arg1 = CALL_EXPR_ARG (exp, 2);
8703 rtx op0 = expand_normal (arg0);
8704 rtx op1 = expand_normal (arg1);
8705 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8706 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8707 int form_int;
8708 enum rtx_code code;
8709
8710 if (TREE_CODE (form) != INTEGER_CST)
8711 {
8712 error ("argument 1 of __builtin_paired_predicate must be a constant");
8713 return const0_rtx;
8714 }
8715 else
8716 form_int = TREE_INT_CST_LOW (form);
8717
8718 gcc_assert (mode0 == mode1);
8719
8720 if (arg0 == error_mark_node || arg1 == error_mark_node)
8721 return const0_rtx;
8722
8723 if (target == 0
8724 || GET_MODE (target) != SImode
8725 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
8726 target = gen_reg_rtx (SImode);
8727 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
8728 op0 = copy_to_mode_reg (mode0, op0);
8729 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
8730 op1 = copy_to_mode_reg (mode1, op1);
8731
8732 scratch = gen_reg_rtx (CCFPmode);
8733
8734 pat = GEN_FCN (icode) (scratch, op0, op1);
8735 if (!pat)
8736 return const0_rtx;
8737
8738 emit_insn (pat);
8739
8740 switch (form_int)
8741 {
8742 /* LT bit. */
8743 case 0:
8744 code = LT;
8745 break;
8746 /* GT bit. */
8747 case 1:
8748 code = GT;
8749 break;
8750 /* EQ bit. */
8751 case 2:
8752 code = EQ;
8753 break;
8754 /* UN bit. */
8755 case 3:
8756 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8757 return target;
8758 default:
8759 error ("argument 1 of __builtin_paired_predicate is out of range");
8760 return const0_rtx;
8761 }
8762
8763 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8764 emit_move_insn (target, tmp);
8765 return target;
8766}
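/* Callers of the paired predicate builtins pass FORM as the first
   argument: 0, 1, 2 and 3 select the LT, GT, EQ and UN bits of the
   CCFP compare above, so e.g. FORM == 2 yields nonzero only when the
   elements compare equal.  */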
8767
a3170dc6 8768static rtx
5039610b 8769spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
8770{
8771 rtx pat, scratch, tmp;
8772 tree form = CALL_EXPR_ARG (exp, 0);
8773 tree arg0 = CALL_EXPR_ARG (exp, 1);
8774 tree arg1 = CALL_EXPR_ARG (exp, 2);
8775 rtx op0 = expand_normal (arg0);
8776 rtx op1 = expand_normal (arg1);
8777 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8778 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8779 int form_int;
8780 enum rtx_code code;
8781
8782 if (TREE_CODE (form) != INTEGER_CST)
8783 {
8784 error ("argument 1 of __builtin_spe_predicate must be a constant");
8785 return const0_rtx;
8786 }
8787 else
8788 form_int = TREE_INT_CST_LOW (form);
8789
37409796 8790 gcc_assert (mode0 == mode1);
8791
8792 if (arg0 == error_mark_node || arg1 == error_mark_node)
8793 return const0_rtx;
8794
8795 if (target == 0
8796 || GET_MODE (target) != SImode
8797 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
8798 target = gen_reg_rtx (SImode);
8799
8800 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8801 op0 = copy_to_mode_reg (mode0, op0);
8802 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8803 op1 = copy_to_mode_reg (mode1, op1);
8804
8805 scratch = gen_reg_rtx (CCmode);
8806
8807 pat = GEN_FCN (icode) (scratch, op0, op1);
8808 if (! pat)
8809 return const0_rtx;
8810 emit_insn (pat);
8811
8812 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
8813 _lower_. We use one compare, but look in different bits of the
8814 CR for each variant.
8815
8816 There are 2 elements in each SPE simd type (upper/lower). The CR
8817 bits are set as follows:
8818
8819 BIT0 | BIT 1 | BIT 2 | BIT 3
8820 U | L | (U | L) | (U & L)
8821
8822 So, for an "all" relationship, BIT 3 would be set.
8823 For an "any" relationship, BIT 2 would be set. Etc.
8824
8825 Following traditional nomenclature, these bits map to:
8826
8827 BIT0 | BIT 1 | BIT 2 | BIT 3
8828 LT | GT | EQ | OV
8829
 8830 Later, we will generate rtl to look in the OV/EQ/LT/GT bits (forms 0-3 above).
8831 */
8832
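/* Concretely, for one of the SPE compare predicates registered from
   bdesc_spe_predicates: form 0 asks whether the relation holds for
   all elements (OV bit), form 1 for any element (EQ bit), and forms
   2 and 3 test only the upper or lower element (LT and GT bits).  */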
8833 switch (form_int)
8834 {
8835 /* All variant. OV bit. */
8836 case 0:
8837 /* We need to get to the OV bit, which is the ORDERED bit. We
8838 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 8839 that's ugly and will make validate_condition_mode die.
8840 So let's just use another pattern. */
8841 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8842 return target;
8843 /* Any variant. EQ bit. */
8844 case 1:
8845 code = EQ;
8846 break;
8847 /* Upper variant. LT bit. */
8848 case 2:
8849 code = LT;
8850 break;
8851 /* Lower variant. GT bit. */
8852 case 3:
8853 code = GT;
8854 break;
8855 default:
8856 error ("argument 1 of __builtin_spe_predicate is out of range");
8857 return const0_rtx;
8858 }
8859
8860 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8861 emit_move_insn (target, tmp);
8862
8863 return target;
8864}
8865
8866/* The evsel builtins look like this:
8867
8868 e = __builtin_spe_evsel_OP (a, b, c, d);
8869
8870 and work like this:
8871
8872 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
8873 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
8874*/
8875
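/* A concrete (illustrative) case: with the signed greater-than compare
   (SPE_BUILTIN_EVSEL_CMPGTS),

     e = __builtin_spe_evsel_gts (a, b, c, d);

   picks c[i] where a[i] > b[i] and d[i] otherwise, independently for
   the upper and lower elements.  The builtin name here is illustrative;
   the registered names come from bdesc_spe_evsel.  */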
8876static rtx
5039610b 8877spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
8878{
8879 rtx pat, scratch;
8880 tree arg0 = CALL_EXPR_ARG (exp, 0);
8881 tree arg1 = CALL_EXPR_ARG (exp, 1);
8882 tree arg2 = CALL_EXPR_ARG (exp, 2);
8883 tree arg3 = CALL_EXPR_ARG (exp, 3);
8884 rtx op0 = expand_normal (arg0);
8885 rtx op1 = expand_normal (arg1);
8886 rtx op2 = expand_normal (arg2);
8887 rtx op3 = expand_normal (arg3);
8888 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8889 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8890
37409796 8891 gcc_assert (mode0 == mode1);
8892
8893 if (arg0 == error_mark_node || arg1 == error_mark_node
8894 || arg2 == error_mark_node || arg3 == error_mark_node)
8895 return const0_rtx;
8896
8897 if (target == 0
8898 || GET_MODE (target) != mode0
8899 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
8900 target = gen_reg_rtx (mode0);
8901
8902 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8903 op0 = copy_to_mode_reg (mode0, op0);
8904 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8905 op1 = copy_to_mode_reg (mode0, op1);
8906 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8907 op2 = copy_to_mode_reg (mode0, op2);
8908 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
8909 op3 = copy_to_mode_reg (mode0, op3);
8910
8911 /* Generate the compare. */
8912 scratch = gen_reg_rtx (CCmode);
8913 pat = GEN_FCN (icode) (scratch, op0, op1);
8914 if (! pat)
8915 return const0_rtx;
8916 emit_insn (pat);
8917
8918 if (mode0 == V2SImode)
8919 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
8920 else
8921 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
8922
8923 return target;
8924}
8925
8926/* Expand an expression EXP that calls a built-in function,
8927 with result going to TARGET if that's convenient
8928 (and in mode MODE if that's convenient).
8929 SUBTARGET may be used as the target for computing one of EXP's operands.
8930 IGNORE is nonzero if the value is to be ignored. */
8931
8932static rtx
a2369ed3 8933rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
8934 enum machine_mode mode ATTRIBUTE_UNUSED,
8935 int ignore ATTRIBUTE_UNUSED)
0ac081f6 8936{
5039610b 8937 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 8938 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 8939 const struct builtin_description *d;
8940 size_t i;
8941 rtx ret;
8942 bool success;
f676971a 8943
8944 if (fcode == RS6000_BUILTIN_RECIP)
8945 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
8946
8947 if (fcode == RS6000_BUILTIN_RECIPF)
8948 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
8949
8950 if (fcode == RS6000_BUILTIN_RSQRTF)
8951 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
8952
8953 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
8954 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8955 {
8956 int icode = (int) CODE_FOR_altivec_lvsr;
8957 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8958 enum machine_mode mode = insn_data[icode].operand[1].mode;
8959 tree arg;
8960 rtx op, addr, pat;
8961
37409796 8962 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 8963
5039610b 8964 arg = CALL_EXPR_ARG (exp, 0);
37409796 8965 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
8966 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
8967 addr = memory_address (mode, op);
8968 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8969 op = addr;
8970 else
8971 {
8972 /* For the load case need to negate the address. */
8973 op = gen_reg_rtx (GET_MODE (addr));
8974 emit_insn (gen_rtx_SET (VOIDmode, op,
8975 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 8976 }
8977 op = gen_rtx_MEM (mode, op);
8978
8979 if (target == 0
8980 || GET_MODE (target) != tmode
8981 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8982 target = gen_reg_rtx (tmode);
8983
8984 /*pat = gen_altivec_lvsr (target, op);*/
8985 pat = GEN_FCN (icode) (target, op);
8986 if (!pat)
8987 return 0;
8988 emit_insn (pat);
8989
8990 return target;
8991 }
8992
8993 /* FIXME: There's got to be a nicer way to handle this case than
8994 constructing a new CALL_EXPR. */
8995 if (fcode == ALTIVEC_BUILTIN_VCFUX
8996 || fcode == ALTIVEC_BUILTIN_VCFSX)
8997 {
8998 if (call_expr_nargs (exp) == 1)
8999 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
9000 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 9001 }
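/* In effect, a one-argument call such as __builtin_altivec_vcfsx (v)
   is treated as __builtin_altivec_vcfsx (v, 0); the scale operand
   simply defaults to zero when it is omitted.  */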
7ccf35ed 9002
0ac081f6 9003 if (TARGET_ALTIVEC)
9004 {
9005 ret = altivec_expand_builtin (exp, target, &success);
9006
9007 if (success)
9008 return ret;
9009 }
9010 if (TARGET_SPE)
9011 {
9012 ret = spe_expand_builtin (exp, target, &success);
9013
9014 if (success)
9015 return ret;
9016 }
9017 if (TARGET_PAIRED_FLOAT)
9018 {
9019 ret = paired_expand_builtin (exp, target, &success);
9020
9021 if (success)
9022 return ret;
9023 }
92898235 9024
96038623 9025 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 9026
9027 /* Handle simple unary operations. */
9028 d = (struct builtin_description *) bdesc_1arg;
9029 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9030 if (d->code == fcode)
5039610b 9031 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 9032
9033 /* Handle simple binary operations. */
9034 d = (struct builtin_description *) bdesc_2arg;
9035 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9036 if (d->code == fcode)
5039610b 9037 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 9038
37409796 9039 /* Handle simple ternary operations. */
586de218 9040 d = bdesc_3arg;
9041 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
9042 if (d->code == fcode)
5039610b 9043 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 9044
37409796 9045 gcc_unreachable ();
9046}
9047
9048static tree
9049build_opaque_vector_type (tree node, int nunits)
9050{
9051 node = copy_node (node);
9052 TYPE_MAIN_VARIANT (node) = node;
9053 return build_vector_type (node, nunits);
9054}
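/* The copy is its own TYPE_MAIN_VARIANT, so the resulting vector type
   is distinct from (not merely a variant of) the ordinary vector type;
   it is used just below for opaque_V2SF_type_node and
   opaque_V2SI_type_node.  */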
9055
0ac081f6 9056static void
863d938c 9057rs6000_init_builtins (void)
0ac081f6 9058{
9059 V2SI_type_node = build_vector_type (intSI_type_node, 2);
9060 V2SF_type_node = build_vector_type (float_type_node, 2);
9061 V4HI_type_node = build_vector_type (intHI_type_node, 4);
9062 V4SI_type_node = build_vector_type (intSI_type_node, 4);
9063 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 9064 V8HI_type_node = build_vector_type (intHI_type_node, 8);
9065 V16QI_type_node = build_vector_type (intQI_type_node, 16);
9066
9067 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
9068 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
9069 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
9070
9071 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
9072 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 9073 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 9074 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 9075
9076 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
9077 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
9078 'vector unsigned short'. */
9079
9080 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
9081 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
9082 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
9083 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
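/* Keeping these as distinct type copies, rather than typedefs of the
   unsigned types, is what lets C++ overload resolution tell a
   'vector bool char' argument apart from a 'vector unsigned char'
   one, as the comment above notes.  */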
8bb418a3 9084
9085 long_integer_type_internal_node = long_integer_type_node;
9086 long_unsigned_type_internal_node = long_unsigned_type_node;
9087 intQI_type_internal_node = intQI_type_node;
9088 uintQI_type_internal_node = unsigned_intQI_type_node;
9089 intHI_type_internal_node = intHI_type_node;
9090 uintHI_type_internal_node = unsigned_intHI_type_node;
9091 intSI_type_internal_node = intSI_type_node;
9092 uintSI_type_internal_node = unsigned_intSI_type_node;
9093 float_type_internal_node = float_type_node;
9094 void_type_internal_node = void_type_node;
9095
9096 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9097 get_identifier ("__bool char"),
9098 bool_char_type_node));
9099 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9100 get_identifier ("__bool short"),
9101 bool_short_type_node));
9102 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9103 get_identifier ("__bool int"),
9104 bool_int_type_node));
9105 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9106 get_identifier ("__pixel"),
9107 pixel_type_node));
9108
9109 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
9110 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
9111 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
9112 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
9113
9114 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9115 get_identifier ("__vector unsigned char"),
9116 unsigned_V16QI_type_node));
9117 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9118 get_identifier ("__vector signed char"),
9119 V16QI_type_node));
9120 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9121 get_identifier ("__vector __bool char"),
9122 bool_V16QI_type_node));
9123
9124 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9125 get_identifier ("__vector unsigned short"),
9126 unsigned_V8HI_type_node));
9127 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9128 get_identifier ("__vector signed short"),
9129 V8HI_type_node));
9130 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9131 get_identifier ("__vector __bool short"),
9132 bool_V8HI_type_node));
9133
9134 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9135 get_identifier ("__vector unsigned int"),
9136 unsigned_V4SI_type_node));
9137 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9138 get_identifier ("__vector signed int"),
9139 V4SI_type_node));
9140 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9141 get_identifier ("__vector __bool int"),
9142 bool_V4SI_type_node));
9143
9144 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9145 get_identifier ("__vector float"),
9146 V4SF_type_node));
9147 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9148 get_identifier ("__vector __pixel"),
9149 pixel_V8HI_type_node));
9150
9151 if (TARGET_PAIRED_FLOAT)
9152 paired_init_builtins ();
a3170dc6 9153 if (TARGET_SPE)
3fdaa45a 9154 spe_init_builtins ();
9155 if (TARGET_ALTIVEC)
9156 altivec_init_builtins ();
96038623 9157 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 9158 rs6000_common_init_builtins ();
9159 if (TARGET_PPC_GFXOPT)
9160 {
9161 tree ftype = build_function_type_list (float_type_node,
9162 float_type_node,
9163 float_type_node,
9164 NULL_TREE);
9165 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9166 RS6000_BUILTIN_RECIPF);
9167
9168 ftype = build_function_type_list (float_type_node,
9169 float_type_node,
9170 NULL_TREE);
9171 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9172 RS6000_BUILTIN_RSQRTF);
9173 }
9174 if (TARGET_POPCNTB)
9175 {
9176 tree ftype = build_function_type_list (double_type_node,
9177 double_type_node,
9178 double_type_node,
9179 NULL_TREE);
9180 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9181 RS6000_BUILTIN_RECIP);
9182
9183 }
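/* Sketch of the user-level view (argument names hypothetical):
   __builtin_recipdivf (x, y) and __builtin_recipdiv (x, y) compute
   x/y via the reciprocal sequences, and __builtin_rsqrtf (x) computes
   1.0f/sqrtf (x); all three are expanded through the recip*3 and
   rsqrtsf2 patterns handled near the top of rs6000_expand_builtin.  */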
9184
9185#if TARGET_XCOFF
9186 /* AIX libm provides clog as __clog. */
9187 if (built_in_decls [BUILT_IN_CLOG])
9188 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9189#endif
9190}
9191
9192/* Search through a set of builtins and enable the mask bits.
9193 DESC is an array of builtins.
b6d08ca1 9194 SIZE is the total number of builtins.
9195 START is the builtin enum at which to start.
9196 END is the builtin enum at which to end. */
0ac081f6 9197static void
a2369ed3 9198enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 9199 enum rs6000_builtins start,
a2369ed3 9200 enum rs6000_builtins end)
9201{
9202 int i;
9203
9204 for (i = 0; i < size; ++i)
9205 if (desc[i].code == start)
9206 break;
9207
9208 if (i == size)
9209 return;
9210
9211 for (; i < size; ++i)
9212 {
9213 /* Flip all the bits on. */
9214 desc[i].mask = target_flags;
9215 if (desc[i].code == end)
9216 break;
9217 }
9218}
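/* For example, the first call in spe_init_builtins below walks
   bdesc_2arg from SPE_BUILTIN_EVADDW up to SPE_BUILTIN_EVXOR and
   stores the current target_flags into each entry's mask, which is
   what turns those entries on.  */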
9219
9220static void
863d938c 9221spe_init_builtins (void)
0ac081f6 9222{
9223 tree endlink = void_list_node;
9224 tree puint_type_node = build_pointer_type (unsigned_type_node);
9225 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9226 struct builtin_description *d;
9227 size_t i;
9228
9229 tree v2si_ftype_4_v2si
9230 = build_function_type
9231 (opaque_V2SI_type_node,
9232 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9233 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9234 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9235 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9236 endlink)))));
9237
9238 tree v2sf_ftype_4_v2sf
9239 = build_function_type
9240 (opaque_V2SF_type_node,
9241 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9242 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9243 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9244 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9245 endlink)))));
9246
9247 tree int_ftype_int_v2si_v2si
9248 = build_function_type
9249 (integer_type_node,
9250 tree_cons (NULL_TREE, integer_type_node,
9251 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9252 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9253 endlink))));
9254
9255 tree int_ftype_int_v2sf_v2sf
9256 = build_function_type
9257 (integer_type_node,
9258 tree_cons (NULL_TREE, integer_type_node,
9259 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9260 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9261 endlink))));
9262
9263 tree void_ftype_v2si_puint_int
9264 = build_function_type (void_type_node,
3fdaa45a 9265 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9266 tree_cons (NULL_TREE, puint_type_node,
9267 tree_cons (NULL_TREE,
9268 integer_type_node,
9269 endlink))));
9270
9271 tree void_ftype_v2si_puint_char
9272 = build_function_type (void_type_node,
3fdaa45a 9273 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9274 tree_cons (NULL_TREE, puint_type_node,
9275 tree_cons (NULL_TREE,
9276 char_type_node,
9277 endlink))));
9278
9279 tree void_ftype_v2si_pv2si_int
9280 = build_function_type (void_type_node,
3fdaa45a 9281 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9282 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
9283 tree_cons (NULL_TREE,
9284 integer_type_node,
9285 endlink))));
9286
9287 tree void_ftype_v2si_pv2si_char
9288 = build_function_type (void_type_node,
3fdaa45a 9289 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9290 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
9291 tree_cons (NULL_TREE,
9292 char_type_node,
9293 endlink))));
9294
9295 tree void_ftype_int
9296 = build_function_type (void_type_node,
9297 tree_cons (NULL_TREE, integer_type_node, endlink));
9298
9299 tree int_ftype_void
36e8d515 9300 = build_function_type (integer_type_node, endlink);
9301
9302 tree v2si_ftype_pv2si_int
3fdaa45a 9303 = build_function_type (opaque_V2SI_type_node,
6035d635 9304 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
9305 tree_cons (NULL_TREE, integer_type_node,
9306 endlink)));
9307
9308 tree v2si_ftype_puint_int
3fdaa45a 9309 = build_function_type (opaque_V2SI_type_node,
9310 tree_cons (NULL_TREE, puint_type_node,
9311 tree_cons (NULL_TREE, integer_type_node,
9312 endlink)));
9313
9314 tree v2si_ftype_pushort_int
3fdaa45a 9315 = build_function_type (opaque_V2SI_type_node,
9316 tree_cons (NULL_TREE, pushort_type_node,
9317 tree_cons (NULL_TREE, integer_type_node,
9318 endlink)));
9319
9320 tree v2si_ftype_signed_char
9321 = build_function_type (opaque_V2SI_type_node,
9322 tree_cons (NULL_TREE, signed_char_type_node,
9323 endlink));
9324
9325 /* The initialization of the simple binary and unary builtins is
9326 done in rs6000_common_init_builtins, but we have to enable the
9327 mask bits here manually because we have run out of `target_flags'
9328 bits. We really need to redesign this mask business. */
9329
9330 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9331 ARRAY_SIZE (bdesc_2arg),
9332 SPE_BUILTIN_EVADDW,
9333 SPE_BUILTIN_EVXOR);
9334 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9335 ARRAY_SIZE (bdesc_1arg),
9336 SPE_BUILTIN_EVABS,
9337 SPE_BUILTIN_EVSUBFUSIAAW);
9338 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9339 ARRAY_SIZE (bdesc_spe_predicates),
9340 SPE_BUILTIN_EVCMPEQ,
9341 SPE_BUILTIN_EVFSTSTLT);
9342 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9343 ARRAY_SIZE (bdesc_spe_evsel),
9344 SPE_BUILTIN_EVSEL_CMPGTS,
9345 SPE_BUILTIN_EVSEL_FSTSTEQ);
9346
9347 (*lang_hooks.decls.pushdecl)
9348 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9349 opaque_V2SI_type_node));
9350
a3170dc6 9351 /* Initialize irregular SPE builtins. */
f676971a 9352
9353 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9354 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9355 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9356 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9357 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9358 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9359 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9360 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9361 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9362 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9363 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9364 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9365 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9366 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9367 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9368 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
9369 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9370 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
9371
9372 /* Loads. */
9373 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9374 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9375 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9376 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9377 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9378 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9379 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9380 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9381 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9382 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9383 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9384 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9385 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9386 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9387 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9388 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9389 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9390 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9391 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9392 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9393 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9394 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9395
9396 /* Predicates. */
9397 d = (struct builtin_description *) bdesc_spe_predicates;
9398 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9399 {
9400 tree type;
9401
9402 switch (insn_data[d->icode].operand[1].mode)
9403 {
9404 case V2SImode:
9405 type = int_ftype_int_v2si_v2si;
9406 break;
9407 case V2SFmode:
9408 type = int_ftype_int_v2sf_v2sf;
9409 break;
9410 default:
37409796 9411 gcc_unreachable ();
9412 }
9413
9414 def_builtin (d->mask, d->name, type, d->code);
9415 }
9416
9417 /* Evsel predicates. */
9418 d = (struct builtin_description *) bdesc_spe_evsel;
9419 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9420 {
9421 tree type;
9422
9423 switch (insn_data[d->icode].operand[1].mode)
9424 {
9425 case V2SImode:
9426 type = v2si_ftype_4_v2si;
9427 break;
9428 case V2SFmode:
9429 type = v2sf_ftype_4_v2sf;
9430 break;
9431 default:
37409796 9432 gcc_unreachable ();
9433 }
9434
9435 def_builtin (d->mask, d->name, type, d->code);
9436 }
9437}
9438
9439static void
9440paired_init_builtins (void)
9441{
23a651fc 9442 const struct builtin_description *d;
9443 size_t i;
9444 tree endlink = void_list_node;
9445
9446 tree int_ftype_int_v2sf_v2sf
9447 = build_function_type
9448 (integer_type_node,
9449 tree_cons (NULL_TREE, integer_type_node,
9450 tree_cons (NULL_TREE, V2SF_type_node,
9451 tree_cons (NULL_TREE, V2SF_type_node,
9452 endlink))));
9453 tree pcfloat_type_node =
9454 build_pointer_type (build_qualified_type
9455 (float_type_node, TYPE_QUAL_CONST));
9456
9457 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9458 long_integer_type_node,
9459 pcfloat_type_node,
9460 NULL_TREE);
9461 tree void_ftype_v2sf_long_pcfloat =
9462 build_function_type_list (void_type_node,
9463 V2SF_type_node,
9464 long_integer_type_node,
9465 pcfloat_type_node,
9466 NULL_TREE);
9467
9468
9469 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9470 PAIRED_BUILTIN_LX);
9471
9472
9473 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9474 PAIRED_BUILTIN_STX);
9475
9476 /* Predicates. */
23a651fc 9477 d = bdesc_paired_preds;
9478 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9479 {
9480 tree type;
9481
9482 switch (insn_data[d->icode].operand[1].mode)
9483 {
9484 case V2SFmode:
9485 type = int_ftype_int_v2sf_v2sf;
9486 break;
9487 default:
9488 gcc_unreachable ();
9489 }
9490
9491 def_builtin (d->mask, d->name, type, d->code);
9492 }
9493}
9494
a3170dc6 9495static void
863d938c 9496altivec_init_builtins (void)
a3170dc6 9497{
9498 const struct builtin_description *d;
9499 const struct builtin_description_predicates *dp;
a3170dc6 9500 size_t i;
9501 tree ftype;
9502
9503 tree pfloat_type_node = build_pointer_type (float_type_node);
9504 tree pint_type_node = build_pointer_type (integer_type_node);
9505 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9506 tree pchar_type_node = build_pointer_type (char_type_node);
9507
9508 tree pvoid_type_node = build_pointer_type (void_type_node);
9509
9510 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9511 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9512 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9513 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9514
9515 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9516
9517 tree int_ftype_opaque
9518 = build_function_type_list (integer_type_node,
9519 opaque_V4SI_type_node, NULL_TREE);
9520
9521 tree opaque_ftype_opaque_int
9522 = build_function_type_list (opaque_V4SI_type_node,
9523 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9524 tree opaque_ftype_opaque_opaque_int
9525 = build_function_type_list (opaque_V4SI_type_node,
9526 opaque_V4SI_type_node, opaque_V4SI_type_node,
9527 integer_type_node, NULL_TREE);
9528 tree int_ftype_int_opaque_opaque
9529 = build_function_type_list (integer_type_node,
9530 integer_type_node, opaque_V4SI_type_node,
9531 opaque_V4SI_type_node, NULL_TREE);
9532 tree int_ftype_int_v4si_v4si
9533 = build_function_type_list (integer_type_node,
9534 integer_type_node, V4SI_type_node,
9535 V4SI_type_node, NULL_TREE);
9536 tree v4sf_ftype_pcfloat
9537 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9538 tree void_ftype_pfloat_v4sf
b4de2f7d 9539 = build_function_type_list (void_type_node,
a3170dc6 9540 pfloat_type_node, V4SF_type_node, NULL_TREE);
9541 tree v4si_ftype_pcint
9542 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9543 tree void_ftype_pint_v4si
9544 = build_function_type_list (void_type_node,
9545 pint_type_node, V4SI_type_node, NULL_TREE);
9546 tree v8hi_ftype_pcshort
9547 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9548 tree void_ftype_pshort_v8hi
9549 = build_function_type_list (void_type_node,
9550 pshort_type_node, V8HI_type_node, NULL_TREE);
9551 tree v16qi_ftype_pcchar
9552 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9553 tree void_ftype_pchar_v16qi
9554 = build_function_type_list (void_type_node,
9555 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9556 tree void_ftype_v4si
b4de2f7d 9557 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
9558 tree v8hi_ftype_void
9559 = build_function_type (V8HI_type_node, void_list_node);
9560 tree void_ftype_void
9561 = build_function_type (void_type_node, void_list_node);
9562 tree void_ftype_int
9563 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9564
9565 tree opaque_ftype_long_pcvoid
9566 = build_function_type_list (opaque_V4SI_type_node,
9567 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9568 tree v16qi_ftype_long_pcvoid
a3170dc6 9569 = build_function_type_list (V16QI_type_node,
9570 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9571 tree v8hi_ftype_long_pcvoid
a3170dc6 9572 = build_function_type_list (V8HI_type_node,
9573 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9574 tree v4si_ftype_long_pcvoid
a3170dc6 9575 = build_function_type_list (V4SI_type_node,
b4a62fa0 9576 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9577
9578 tree void_ftype_opaque_long_pvoid
9579 = build_function_type_list (void_type_node,
9580 opaque_V4SI_type_node, long_integer_type_node,
9581 pvoid_type_node, NULL_TREE);
b4a62fa0 9582 tree void_ftype_v4si_long_pvoid
b4de2f7d 9583 = build_function_type_list (void_type_node,
b4a62fa0 9584 V4SI_type_node, long_integer_type_node,
b4de2f7d 9585 pvoid_type_node, NULL_TREE);
b4a62fa0 9586 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9587 = build_function_type_list (void_type_node,
b4a62fa0 9588 V16QI_type_node, long_integer_type_node,
b4de2f7d 9589 pvoid_type_node, NULL_TREE);
b4a62fa0 9590 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9591 = build_function_type_list (void_type_node,
b4a62fa0 9592 V8HI_type_node, long_integer_type_node,
b4de2f7d 9593 pvoid_type_node, NULL_TREE);
9594 tree int_ftype_int_v8hi_v8hi
9595 = build_function_type_list (integer_type_node,
9596 integer_type_node, V8HI_type_node,
9597 V8HI_type_node, NULL_TREE);
9598 tree int_ftype_int_v16qi_v16qi
9599 = build_function_type_list (integer_type_node,
9600 integer_type_node, V16QI_type_node,
9601 V16QI_type_node, NULL_TREE);
9602 tree int_ftype_int_v4sf_v4sf
9603 = build_function_type_list (integer_type_node,
9604 integer_type_node, V4SF_type_node,
9605 V4SF_type_node, NULL_TREE);
9606 tree v4si_ftype_v4si
9607 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9608 tree v8hi_ftype_v8hi
9609 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9610 tree v16qi_ftype_v16qi
9611 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9612 tree v4sf_ftype_v4sf
9613 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9614 tree void_ftype_pcvoid_int_int
a3170dc6 9615 = build_function_type_list (void_type_node,
0dbc3651 9616 pcvoid_type_node, integer_type_node,
8bb418a3 9617 integer_type_node, NULL_TREE);
8bb418a3 9618
9619 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9620 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9621 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9622 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9623 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9624 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9625 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9626 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9627 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9628 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9629 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9630 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9631 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9632 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9633 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9634 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
9635 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9636 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9637 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9638 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
9639 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9640 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9641 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9642 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9643 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9644 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9645 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9646 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9647 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9648 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9649 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9650 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
9651 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9652 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9653 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9654 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9655 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9656 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9657 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9658 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9659 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9660 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9661 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9662 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9663 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9664 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9665
9666 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
9667
9668 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
9669 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
9670 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
9671 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
9672 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
9673 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
9674 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
9675 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
9676 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
9677 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 9678
a3170dc6 9679 /* Add the DST variants. */
586de218 9680 d = bdesc_dst;
a3170dc6 9681 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 9682 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
9683
9684 /* Initialize the predicates. */
586de218 9685 dp = bdesc_altivec_preds;
9686 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
9687 {
9688 enum machine_mode mode1;
9689 tree type;
9690 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9691 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 9692
9693 if (is_overloaded)
9694 mode1 = VOIDmode;
9695 else
9696 mode1 = insn_data[dp->icode].operand[1].mode;
9697
9698 switch (mode1)
9699 {
9700 case VOIDmode:
9701 type = int_ftype_int_opaque_opaque;
9702 break;
9703 case V4SImode:
9704 type = int_ftype_int_v4si_v4si;
9705 break;
9706 case V8HImode:
9707 type = int_ftype_int_v8hi_v8hi;
9708 break;
9709 case V16QImode:
9710 type = int_ftype_int_v16qi_v16qi;
9711 break;
9712 case V4SFmode:
9713 type = int_ftype_int_v4sf_v4sf;
9714 break;
9715 default:
37409796 9716 gcc_unreachable ();
a3170dc6 9717 }
f676971a 9718
9719 def_builtin (dp->mask, dp->name, type, dp->code);
9720 }
9721
9722 /* Initialize the abs* operators. */
586de218 9723 d = bdesc_abs;
9724 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
9725 {
9726 enum machine_mode mode0;
9727 tree type;
9728
9729 mode0 = insn_data[d->icode].operand[0].mode;
9730
9731 switch (mode0)
9732 {
9733 case V4SImode:
9734 type = v4si_ftype_v4si;
9735 break;
9736 case V8HImode:
9737 type = v8hi_ftype_v8hi;
9738 break;
9739 case V16QImode:
9740 type = v16qi_ftype_v16qi;
9741 break;
9742 case V4SFmode:
9743 type = v4sf_ftype_v4sf;
9744 break;
9745 default:
37409796 9746 gcc_unreachable ();
a3170dc6 9747 }
f676971a 9748
9749 def_builtin (d->mask, d->name, type, d->code);
9750 }
7ccf35ed 9751
9752 if (TARGET_ALTIVEC)
9753 {
9754 tree decl;
9755
9756 /* Initialize target builtin that implements
9757 targetm.vectorize.builtin_mask_for_load. */
9758
9759 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
9760 v16qi_ftype_long_pcvoid,
9761 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
9762 BUILT_IN_MD, NULL, NULL_TREE);
9763 TREE_READONLY (decl) = 1;
9764 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
9765 altivec_builtin_mask_for_load = decl;
13c62176 9766 }
9767
9768 /* Access to the vec_init patterns. */
9769 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
9770 integer_type_node, integer_type_node,
9771 integer_type_node, NULL_TREE);
9772 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
9773 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
9774
9775 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
9776 short_integer_type_node,
9777 short_integer_type_node,
9778 short_integer_type_node,
9779 short_integer_type_node,
9780 short_integer_type_node,
9781 short_integer_type_node,
9782 short_integer_type_node, NULL_TREE);
9783 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
9784 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
9785
9786 ftype = build_function_type_list (V16QI_type_node, char_type_node,
9787 char_type_node, char_type_node,
9788 char_type_node, char_type_node,
9789 char_type_node, char_type_node,
9790 char_type_node, char_type_node,
9791 char_type_node, char_type_node,
9792 char_type_node, char_type_node,
9793 char_type_node, char_type_node,
9794 char_type_node, NULL_TREE);
9795 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
9796 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
9797
9798 ftype = build_function_type_list (V4SF_type_node, float_type_node,
9799 float_type_node, float_type_node,
9800 float_type_node, NULL_TREE);
9801 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
9802 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
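/* Illustrative use of the vec_init entry points (values hypothetical):
   __builtin_vec_init_v4si (1, 2, 3, 4) builds the V4SI vector
   {1, 2, 3, 4}; the v8hi, v16qi and v4sf variants take 8, 16 and 4
   scalar arguments respectively, as the types above show.  */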
9803
9804 /* Access to the vec_set patterns. */
9805 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
9806 intSI_type_node,
9807 integer_type_node, NULL_TREE);
9808 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
9809 ALTIVEC_BUILTIN_VEC_SET_V4SI);
9810
9811 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
9812 intHI_type_node,
9813 integer_type_node, NULL_TREE);
9814 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
9815 ALTIVEC_BUILTIN_VEC_SET_V8HI);
9816
9817 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
9818 intQI_type_node,
9819 integer_type_node, NULL_TREE);
9820 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
9821 ALTIVEC_BUILTIN_VEC_SET_V16QI);
9822
9823 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
9824 float_type_node,
9825 integer_type_node, NULL_TREE);
9826 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
9827 ALTIVEC_BUILTIN_VEC_SET_V4SF);
9828
9829 /* Access to the vec_extract patterns. */
9830 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
9831 integer_type_node, NULL_TREE);
9832 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
9833 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
9834
9835 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
9836 integer_type_node, NULL_TREE);
9837 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
9838 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
9839
9840 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
9841 integer_type_node, NULL_TREE);
9842 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
9843 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
9844
9845 ftype = build_function_type_list (float_type_node, V4SF_type_node,
9846 integer_type_node, NULL_TREE);
9847 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
9848 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
9849}
9850
9851static void
863d938c 9852rs6000_common_init_builtins (void)
a3170dc6 9853{
586de218 9854 const struct builtin_description *d;
9855 size_t i;
9856
9857 tree v2sf_ftype_v2sf_v2sf_v2sf
9858 = build_function_type_list (V2SF_type_node,
9859 V2SF_type_node, V2SF_type_node,
9860 V2SF_type_node, NULL_TREE);
9861
9862 tree v4sf_ftype_v4sf_v4sf_v16qi
9863 = build_function_type_list (V4SF_type_node,
9864 V4SF_type_node, V4SF_type_node,
9865 V16QI_type_node, NULL_TREE);
9866 tree v4si_ftype_v4si_v4si_v16qi
9867 = build_function_type_list (V4SI_type_node,
9868 V4SI_type_node, V4SI_type_node,
9869 V16QI_type_node, NULL_TREE);
9870 tree v8hi_ftype_v8hi_v8hi_v16qi
9871 = build_function_type_list (V8HI_type_node,
9872 V8HI_type_node, V8HI_type_node,
9873 V16QI_type_node, NULL_TREE);
9874 tree v16qi_ftype_v16qi_v16qi_v16qi
9875 = build_function_type_list (V16QI_type_node,
9876 V16QI_type_node, V16QI_type_node,
9877 V16QI_type_node, NULL_TREE);
9878 tree v4si_ftype_int
9879 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
9880 tree v8hi_ftype_int
9881 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
9882 tree v16qi_ftype_int
9883 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
9884 tree v8hi_ftype_v16qi
9885 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
9886 tree v4sf_ftype_v4sf
9887 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
9888
9889 tree v2si_ftype_v2si_v2si
9890 = build_function_type_list (opaque_V2SI_type_node,
9891 opaque_V2SI_type_node,
9892 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9893
96038623 9894 tree v2sf_ftype_v2sf_v2sf_spe
9895 = build_function_type_list (opaque_V2SF_type_node,
9896 opaque_V2SF_type_node,
9897 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 9898
9899 tree v2sf_ftype_v2sf_v2sf
9900 = build_function_type_list (V2SF_type_node,
9901 V2SF_type_node,
9902 V2SF_type_node, NULL_TREE);
9903
9904
a3170dc6 9905 tree v2si_ftype_int_int
2abe3e28 9906 = build_function_type_list (opaque_V2SI_type_node,
9907 integer_type_node, integer_type_node,
9908 NULL_TREE);
9909
9910 tree opaque_ftype_opaque
9911 = build_function_type_list (opaque_V4SI_type_node,
9912 opaque_V4SI_type_node, NULL_TREE);
9913
a3170dc6 9914 tree v2si_ftype_v2si
9915 = build_function_type_list (opaque_V2SI_type_node,
9916 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9917
96038623 9918 tree v2sf_ftype_v2sf_spe
9919 = build_function_type_list (opaque_V2SF_type_node,
9920 opaque_V2SF_type_node, NULL_TREE);
f676971a 9921
9922 tree v2sf_ftype_v2sf
9923 = build_function_type_list (V2SF_type_node,
9924 V2SF_type_node, NULL_TREE);
9925
a3170dc6 9926 tree v2sf_ftype_v2si
9927 = build_function_type_list (opaque_V2SF_type_node,
9928 opaque_V2SI_type_node, NULL_TREE);
9929
9930 tree v2si_ftype_v2sf
9931 = build_function_type_list (opaque_V2SI_type_node,
9932 opaque_V2SF_type_node, NULL_TREE);
9933
9934 tree v2si_ftype_v2si_char
9935 = build_function_type_list (opaque_V2SI_type_node,
9936 opaque_V2SI_type_node,
9937 char_type_node, NULL_TREE);
9938
9939 tree v2si_ftype_int_char
2abe3e28 9940 = build_function_type_list (opaque_V2SI_type_node,
9941 integer_type_node, char_type_node, NULL_TREE);
9942
9943 tree v2si_ftype_char
9944 = build_function_type_list (opaque_V2SI_type_node,
9945 char_type_node, NULL_TREE);
9946
9947 tree int_ftype_int_int
9948 = build_function_type_list (integer_type_node,
9949 integer_type_node, integer_type_node,
9950 NULL_TREE);
95385cbb 9951
9952 tree opaque_ftype_opaque_opaque
9953 = build_function_type_list (opaque_V4SI_type_node,
9954 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 9955 tree v4si_ftype_v4si_v4si
9956 = build_function_type_list (V4SI_type_node,
9957 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 9958 tree v4sf_ftype_v4si_int
b4de2f7d 9959 = build_function_type_list (V4SF_type_node,
9960 V4SI_type_node, integer_type_node, NULL_TREE);
9961 tree v4si_ftype_v4sf_int
b4de2f7d 9962 = build_function_type_list (V4SI_type_node,
9963 V4SF_type_node, integer_type_node, NULL_TREE);
9964 tree v4si_ftype_v4si_int
b4de2f7d 9965 = build_function_type_list (V4SI_type_node,
9966 V4SI_type_node, integer_type_node, NULL_TREE);
9967 tree v8hi_ftype_v8hi_int
b4de2f7d 9968 = build_function_type_list (V8HI_type_node,
9969 V8HI_type_node, integer_type_node, NULL_TREE);
9970 tree v16qi_ftype_v16qi_int
b4de2f7d 9971 = build_function_type_list (V16QI_type_node,
9972 V16QI_type_node, integer_type_node, NULL_TREE);
9973 tree v16qi_ftype_v16qi_v16qi_int
9974 = build_function_type_list (V16QI_type_node,
9975 V16QI_type_node, V16QI_type_node,
9976 integer_type_node, NULL_TREE);
9977 tree v8hi_ftype_v8hi_v8hi_int
9978 = build_function_type_list (V8HI_type_node,
9979 V8HI_type_node, V8HI_type_node,
9980 integer_type_node, NULL_TREE);
9981 tree v4si_ftype_v4si_v4si_int
9982 = build_function_type_list (V4SI_type_node,
9983 V4SI_type_node, V4SI_type_node,
9984 integer_type_node, NULL_TREE);
9985 tree v4sf_ftype_v4sf_v4sf_int
9986 = build_function_type_list (V4SF_type_node,
9987 V4SF_type_node, V4SF_type_node,
b9e4e5d1 9988 integer_type_node, NULL_TREE);
0ac081f6 9989 tree v4sf_ftype_v4sf_v4sf
9990 = build_function_type_list (V4SF_type_node,
9991 V4SF_type_node, V4SF_type_node, NULL_TREE);
9992 tree opaque_ftype_opaque_opaque_opaque
9993 = build_function_type_list (opaque_V4SI_type_node,
9994 opaque_V4SI_type_node, opaque_V4SI_type_node,
9995 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 9996 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
9997 = build_function_type_list (V4SF_type_node,
9998 V4SF_type_node, V4SF_type_node,
9999 V4SI_type_node, NULL_TREE);
2212663f 10000 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
10001 = build_function_type_list (V4SF_type_node,
10002 V4SF_type_node, V4SF_type_node,
10003 V4SF_type_node, NULL_TREE);
f676971a 10004 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
10005 = build_function_type_list (V4SI_type_node,
10006 V4SI_type_node, V4SI_type_node,
10007 V4SI_type_node, NULL_TREE);
0ac081f6 10008 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
10009 = build_function_type_list (V8HI_type_node,
10010 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 10011 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
10012 = build_function_type_list (V8HI_type_node,
10013 V8HI_type_node, V8HI_type_node,
10014 V8HI_type_node, NULL_TREE);
c4ad648e 10015 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
10016 = build_function_type_list (V4SI_type_node,
10017 V8HI_type_node, V8HI_type_node,
10018 V4SI_type_node, NULL_TREE);
c4ad648e 10019 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
10020 = build_function_type_list (V4SI_type_node,
10021 V16QI_type_node, V16QI_type_node,
10022 V4SI_type_node, NULL_TREE);
0ac081f6 10023 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
10024 = build_function_type_list (V16QI_type_node,
10025 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10026 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
10027 = build_function_type_list (V4SI_type_node,
10028 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 10029 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
10030 = build_function_type_list (V8HI_type_node,
10031 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10032 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
10033 = build_function_type_list (V4SI_type_node,
10034 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10035 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
10036 = build_function_type_list (V8HI_type_node,
10037 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 10038 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
10039 = build_function_type_list (V16QI_type_node,
10040 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10041 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
10042 = build_function_type_list (V4SI_type_node,
10043 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 10044 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
10045 = build_function_type_list (V4SI_type_node,
10046 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10047 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
10048 = build_function_type_list (V4SI_type_node,
10049 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
10050 tree v4si_ftype_v8hi
10051 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
10052 tree int_ftype_v4si_v4si
10053 = build_function_type_list (integer_type_node,
10054 V4SI_type_node, V4SI_type_node, NULL_TREE);
10055 tree int_ftype_v4sf_v4sf
10056 = build_function_type_list (integer_type_node,
10057 V4SF_type_node, V4SF_type_node, NULL_TREE);
10058 tree int_ftype_v16qi_v16qi
10059 = build_function_type_list (integer_type_node,
10060 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10061 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
10062 = build_function_type_list (integer_type_node,
10063 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10064
6f317ef3 10065 /* Add the simple ternary operators. */
586de218 10066 d = bdesc_3arg;
ca7558fc 10067 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 10068 {
2212663f
DB
10069 enum machine_mode mode0, mode1, mode2, mode3;
10070 tree type;
58646b77
PB
10071 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10072 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 10073
58646b77
PB
10074 if (is_overloaded)
10075 {
10076 mode0 = VOIDmode;
10077 mode1 = VOIDmode;
10078 mode2 = VOIDmode;
10079 mode3 = VOIDmode;
10080 }
10081 else
10082 {
10083 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10084 continue;
f676971a 10085
58646b77
PB
10086 mode0 = insn_data[d->icode].operand[0].mode;
10087 mode1 = insn_data[d->icode].operand[1].mode;
10088 mode2 = insn_data[d->icode].operand[2].mode;
10089 mode3 = insn_data[d->icode].operand[3].mode;
10090 }
bb8df8a6 10091
2212663f
DB
10092 /* When all four are of the same mode. */
10093 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
10094 {
10095 switch (mode0)
10096 {
58646b77
PB
10097 case VOIDmode:
10098 type = opaque_ftype_opaque_opaque_opaque;
10099 break;
617e0e1d
DB
10100 case V4SImode:
10101 type = v4si_ftype_v4si_v4si_v4si;
10102 break;
2212663f
DB
10103 case V4SFmode:
10104 type = v4sf_ftype_v4sf_v4sf_v4sf;
10105 break;
10106 case V8HImode:
10107 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 10108 break;
2212663f
DB
10109 case V16QImode:
10110 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10111 break;
96038623
DE
10112 case V2SFmode:
10113 type = v2sf_ftype_v2sf_v2sf_v2sf;
10114 break;
2212663f 10115 default:
37409796 10116 gcc_unreachable ();
2212663f
DB
10117 }
10118 }
10119 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 10120 {
2212663f
DB
10121 switch (mode0)
10122 {
10123 case V4SImode:
10124 type = v4si_ftype_v4si_v4si_v16qi;
10125 break;
10126 case V4SFmode:
10127 type = v4sf_ftype_v4sf_v4sf_v16qi;
10128 break;
10129 case V8HImode:
10130 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 10131 break;
2212663f
DB
10132 case V16QImode:
10133 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10134 break;
2212663f 10135 default:
37409796 10136 gcc_unreachable ();
2212663f
DB
10137 }
10138 }
f676971a 10139 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 10140 && mode3 == V4SImode)
24408032 10141 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 10142 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 10143 && mode3 == V4SImode)
24408032 10144 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 10145 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 10146 && mode3 == V4SImode)
24408032
AH
10147 type = v4sf_ftype_v4sf_v4sf_v4si;
10148
a7b376ee 10149 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
10150 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10151 && mode3 == QImode)
b9e4e5d1 10152 type = v16qi_ftype_v16qi_v16qi_int;
24408032 10153
a7b376ee 10154 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
10155 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10156 && mode3 == QImode)
b9e4e5d1 10157 type = v8hi_ftype_v8hi_v8hi_int;
24408032 10158
a7b376ee 10159 /* vint, vint, vint, 4-bit literal. */
24408032
AH
10160 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10161 && mode3 == QImode)
b9e4e5d1 10162 type = v4si_ftype_v4si_v4si_int;
24408032 10163
a7b376ee 10164 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
10165 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10166 && mode3 == QImode)
b9e4e5d1 10167 type = v4sf_ftype_v4sf_v4sf_int;
24408032 10168
2212663f 10169 else
37409796 10170 gcc_unreachable ();
2212663f
DB
10171
10172 def_builtin (d->mask, d->name, type, d->code);
10173 }
10174
0ac081f6 10175 /* Add the simple binary operators. */
00b960c7 10176 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 10177 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
10178 {
10179 enum machine_mode mode0, mode1, mode2;
10180 tree type;
58646b77
PB
10181 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10182 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 10183
58646b77
PB
10184 if (is_overloaded)
10185 {
10186 mode0 = VOIDmode;
10187 mode1 = VOIDmode;
10188 mode2 = VOIDmode;
10189 }
10190 else
bb8df8a6 10191 {
58646b77
PB
10192 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10193 continue;
f676971a 10194
58646b77
PB
10195 mode0 = insn_data[d->icode].operand[0].mode;
10196 mode1 = insn_data[d->icode].operand[1].mode;
10197 mode2 = insn_data[d->icode].operand[2].mode;
10198 }
0ac081f6
AH
10199
10200 /* When all three operands are of the same mode. */
10201 if (mode0 == mode1 && mode1 == mode2)
10202 {
10203 switch (mode0)
10204 {
58646b77
PB
10205 case VOIDmode:
10206 type = opaque_ftype_opaque_opaque;
10207 break;
0ac081f6
AH
10208 case V4SFmode:
10209 type = v4sf_ftype_v4sf_v4sf;
10210 break;
10211 case V4SImode:
10212 type = v4si_ftype_v4si_v4si;
10213 break;
10214 case V16QImode:
10215 type = v16qi_ftype_v16qi_v16qi;
10216 break;
10217 case V8HImode:
10218 type = v8hi_ftype_v8hi_v8hi;
10219 break;
a3170dc6
AH
10220 case V2SImode:
10221 type = v2si_ftype_v2si_v2si;
10222 break;
96038623
DE
10223 case V2SFmode:
10224 if (TARGET_PAIRED_FLOAT)
10225 type = v2sf_ftype_v2sf_v2sf;
10226 else
10227 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
10228 break;
10229 case SImode:
10230 type = int_ftype_int_int;
10231 break;
0ac081f6 10232 default:
37409796 10233 gcc_unreachable ();
0ac081f6
AH
10234 }
10235 }
10236
10237 /* A few other combos we really don't want to do manually. */
10238
10239 /* vint, vfloat, vfloat. */
10240 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10241 type = v4si_ftype_v4sf_v4sf;
10242
10243 /* vshort, vchar, vchar. */
10244 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10245 type = v8hi_ftype_v16qi_v16qi;
10246
10247 /* vint, vshort, vshort. */
10248 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10249 type = v4si_ftype_v8hi_v8hi;
10250
10251 /* vshort, vint, vint. */
10252 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10253 type = v8hi_ftype_v4si_v4si;
10254
10255 /* vchar, vshort, vshort. */
10256 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10257 type = v16qi_ftype_v8hi_v8hi;
10258
10259 /* vint, vchar, vint. */
10260 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10261 type = v4si_ftype_v16qi_v4si;
10262
fa066a23
AH
10263 /* vint, vchar, vchar. */
10264 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10265 type = v4si_ftype_v16qi_v16qi;
10266
0ac081f6
AH
10267 /* vint, vshort, vint. */
10268 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10269 type = v4si_ftype_v8hi_v4si;
f676971a 10270
a7b376ee 10271 /* vint, vint, 5-bit literal. */
2212663f 10272 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10273 type = v4si_ftype_v4si_int;
f676971a 10274
a7b376ee 10275 /* vshort, vshort, 5-bit literal. */
2212663f 10276 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 10277 type = v8hi_ftype_v8hi_int;
f676971a 10278
a7b376ee 10279 /* vchar, vchar, 5-bit literal. */
2212663f 10280 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 10281 type = v16qi_ftype_v16qi_int;
0ac081f6 10282
a7b376ee 10283 /* vfloat, vint, 5-bit literal. */
617e0e1d 10284 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10285 type = v4sf_ftype_v4si_int;
f676971a 10286
a7b376ee 10287 /* vint, vfloat, 5-bit literal. */
617e0e1d 10288 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 10289 type = v4si_ftype_v4sf_int;
617e0e1d 10290
a3170dc6
AH
10291 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10292 type = v2si_ftype_int_int;
10293
10294 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10295 type = v2si_ftype_v2si_char;
10296
10297 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10298 type = v2si_ftype_int_char;
10299
37409796 10300 else
0ac081f6 10301 {
37409796
NS
10302 /* int, x, x. */
10303 gcc_assert (mode0 == SImode);
0ac081f6
AH
10304 switch (mode1)
10305 {
10306 case V4SImode:
10307 type = int_ftype_v4si_v4si;
10308 break;
10309 case V4SFmode:
10310 type = int_ftype_v4sf_v4sf;
10311 break;
10312 case V16QImode:
10313 type = int_ftype_v16qi_v16qi;
10314 break;
10315 case V8HImode:
10316 type = int_ftype_v8hi_v8hi;
10317 break;
10318 default:
37409796 10319 gcc_unreachable ();
0ac081f6
AH
10320 }
10321 }
10322
2212663f
DB
10323 def_builtin (d->mask, d->name, type, d->code);
10324 }
24408032 10325
2212663f
DB
10326 /* Add the simple unary operators. */
10327 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10328 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10329 {
10330 enum machine_mode mode0, mode1;
10331 tree type;
58646b77
PB
10332 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10333 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10334
10335 if (is_overloaded)
10336 {
10337 mode0 = VOIDmode;
10338 mode1 = VOIDmode;
10339 }
10340 else
10341 {
10342 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10343 continue;
bb8df8a6 10344
58646b77
PB
10345 mode0 = insn_data[d->icode].operand[0].mode;
10346 mode1 = insn_data[d->icode].operand[1].mode;
10347 }
2212663f
DB
10348
10349 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10350 type = v4si_ftype_int;
2212663f 10351 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10352 type = v8hi_ftype_int;
2212663f 10353 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10354 type = v16qi_ftype_int;
58646b77
PB
10355 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10356 type = opaque_ftype_opaque;
617e0e1d
DB
10357 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10358 type = v4sf_ftype_v4sf;
20e26713
AH
10359 else if (mode0 == V8HImode && mode1 == V16QImode)
10360 type = v8hi_ftype_v16qi;
10361 else if (mode0 == V4SImode && mode1 == V8HImode)
10362 type = v4si_ftype_v8hi;
a3170dc6
AH
10363 else if (mode0 == V2SImode && mode1 == V2SImode)
10364 type = v2si_ftype_v2si;
10365 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10366 {
10367 if (TARGET_PAIRED_FLOAT)
10368 type = v2sf_ftype_v2sf;
10369 else
10370 type = v2sf_ftype_v2sf_spe;
10371 }
a3170dc6
AH
10372 else if (mode0 == V2SFmode && mode1 == V2SImode)
10373 type = v2sf_ftype_v2si;
10374 else if (mode0 == V2SImode && mode1 == V2SFmode)
10375 type = v2si_ftype_v2sf;
10376 else if (mode0 == V2SImode && mode1 == QImode)
10377 type = v2si_ftype_char;
2212663f 10378 else
37409796 10379 gcc_unreachable ();
2212663f 10380
0ac081f6
AH
10381 def_builtin (d->mask, d->name, type, d->code);
10382 }
10383}
10384
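/* Illustrative sketch, not part of the original source: each pass of the
   loops above amounts to one build_function_type_list / def_builtin pair
   per table entry.  Spelled out by hand for a single entry (the real
   vadduwm entry lives in bdesc_2arg), the pattern is:  */
#if 0
  tree example_v4si_ftype_v4si_v4si
    = build_function_type_list (V4SI_type_node,
                                V4SI_type_node, V4SI_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_vadduwm",
               example_v4si_ftype_v4si_v4si, ALTIVEC_BUILTIN_VADDUWM);
#endif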
c15c90bb
ZW
10385static void
10386rs6000_init_libfuncs (void)
10387{
602ea4d3
JJ
10388 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10389 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10390 {
602ea4d3
JJ
10391 /* AIX library routines for float->int conversion. */
10392 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10393 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10394 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10395 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10396 }
c15c90bb 10397
602ea4d3 10398 if (!TARGET_IEEEQUAD)
98c41d98 10399 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
10400 if (!TARGET_XL_COMPAT)
10401 {
10402 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10403 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10404 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10405 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 10406
17caeff2 10407 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10408 {
10409 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10410 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10411 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10412 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10413 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10414 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10415 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10416
10417 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10418 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10419 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10420 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10421 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10422 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10423 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10424 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10425 }
b26941b4
JM
10426
10427 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10428 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10429 }
10430 else
10431 {
10432 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10433 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10434 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10435 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10436 }
c9034561 10437 else
c15c90bb 10438 {
c9034561 10439 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10440
10441 set_optab_libfunc (add_optab, TFmode, "_q_add");
10442 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10443 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10444 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10445 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10446 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10447 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10448
c9034561
ZW
10449 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10450 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10451 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10452 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10453 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10454 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10455
85363ca0
ZW
10456 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10457 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10458 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10459 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10460 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10461 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10462 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10463 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10464 }
10465}
fba73eb1
DE
10466
10467\f
10468/* Expand a block clear operation, and return 1 if successful. Return 0
10469 if we should let the compiler generate normal code.
10470
10471 operands[0] is the destination
10472 operands[1] is the length
57e84f18 10473 operands[3] is the alignment */
fba73eb1
DE
10474
10475int
10476expand_block_clear (rtx operands[])
10477{
10478 rtx orig_dest = operands[0];
10479 rtx bytes_rtx = operands[1];
57e84f18 10480 rtx align_rtx = operands[3];
5514620a
GK
10481 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10482 HOST_WIDE_INT align;
10483 HOST_WIDE_INT bytes;
fba73eb1
DE
10484 int offset;
10485 int clear_bytes;
5514620a 10486 int clear_step;
fba73eb1
DE
10487
10488 /* If this is not a fixed size clear, just call memset */
10489 if (! constp)
10490 return 0;
10491
37409796
NS
10492 /* This must be a fixed size alignment */
10493 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10494 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10495
10496 /* Anything to clear? */
10497 bytes = INTVAL (bytes_rtx);
10498 if (bytes <= 0)
10499 return 1;
10500
5514620a
GK
10501 /* Use the builtin memset after a point, to avoid huge code bloat.
10502 When optimize_size, avoid any significant code bloat; calling
10503 memset is about 4 instructions, so allow for one instruction to
10504 load zero and three to do clearing. */
10505 if (TARGET_ALTIVEC && align >= 128)
10506 clear_step = 16;
10507 else if (TARGET_POWERPC64 && align >= 32)
10508 clear_step = 8;
21d818ff
NF
10509 else if (TARGET_SPE && align >= 64)
10510 clear_step = 8;
5514620a
GK
10511 else
10512 clear_step = 4;
fba73eb1 10513
5514620a
GK
10514 if (optimize_size && bytes > 3 * clear_step)
10515 return 0;
10516 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10517 return 0;
10518
10519 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10520 {
fba73eb1
DE
10521 enum machine_mode mode = BLKmode;
10522 rtx dest;
f676971a 10523
5514620a
GK
10524 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10525 {
10526 clear_bytes = 16;
10527 mode = V4SImode;
10528 }
21d818ff
NF
10529 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10530 {
10531 clear_bytes = 8;
10532 mode = V2SImode;
10533 }
5514620a 10534 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10535 /* 64-bit loads and stores require word-aligned
10536 displacements. */
10537 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10538 {
10539 clear_bytes = 8;
10540 mode = DImode;
fba73eb1 10541 }
5514620a 10542 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
10543 { /* move 4 bytes */
10544 clear_bytes = 4;
10545 mode = SImode;
fba73eb1 10546 }
ec53fc93 10547 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
10548 { /* move 2 bytes */
10549 clear_bytes = 2;
10550 mode = HImode;
fba73eb1
DE
10551 }
10552 else /* move 1 byte at a time */
10553 {
10554 clear_bytes = 1;
10555 mode = QImode;
fba73eb1 10556 }
f676971a 10557
fba73eb1 10558 dest = adjust_address (orig_dest, mode, offset);
f676971a 10559
5514620a 10560 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10561 }
10562
10563 return 1;
10564}
10565
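/* Worked example added for illustration, not from the original source:
   with TARGET_ALTIVEC, align == 128 (16-byte alignment) and bytes == 22,
   the loop above emits a 16-byte V4SImode store at offset 0, a 4-byte
   SImode store at offset 16 and a 2-byte HImode store at offset 20, then
   returns 1.  */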
35aff10b 10566\f
7e69e155
MM
10567/* Expand a block move operation, and return 1 if successful. Return 0
10568 if we should let the compiler generate normal code.
10569
10570 operands[0] is the destination
10571 operands[1] is the source
10572 operands[2] is the length
10573 operands[3] is the alignment */
10574
3933e0e1
MM
10575#define MAX_MOVE_REG 4
10576
7e69e155 10577int
a2369ed3 10578expand_block_move (rtx operands[])
7e69e155 10579{
b6c9286a
MM
10580 rtx orig_dest = operands[0];
10581 rtx orig_src = operands[1];
7e69e155 10582 rtx bytes_rtx = operands[2];
7e69e155 10583 rtx align_rtx = operands[3];
3933e0e1 10584 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10585 int align;
3933e0e1
MM
10586 int bytes;
10587 int offset;
7e69e155 10588 int move_bytes;
cabfd258
GK
10589 rtx stores[MAX_MOVE_REG];
10590 int num_reg = 0;
7e69e155 10591
3933e0e1 10592 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 10593 if (! constp)
3933e0e1
MM
10594 return 0;
10595
37409796
NS
10596 /* This must be a fixed size alignment */
10597 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10598 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10599
7e69e155 10600 /* Anything to move? */
3933e0e1
MM
10601 bytes = INTVAL (bytes_rtx);
10602 if (bytes <= 0)
7e69e155
MM
10603 return 1;
10604
ea9982a8 10605 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10606 reg_parm_stack_space. */
ea9982a8 10607 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10608 return 0;
10609
cabfd258 10610 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10611 {
cabfd258 10612 union {
70128ad9 10613 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10614 rtx (*mov) (rtx, rtx);
cabfd258
GK
10615 } gen_func;
10616 enum machine_mode mode = BLKmode;
10617 rtx src, dest;
f676971a 10618
5514620a
GK
10619 /* Altivec first, since it will be faster than a string move
10620 when it applies, and usually not significantly larger. */
10621 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10622 {
10623 move_bytes = 16;
10624 mode = V4SImode;
10625 gen_func.mov = gen_movv4si;
10626 }
21d818ff
NF
10627 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10628 {
10629 move_bytes = 8;
10630 mode = V2SImode;
10631 gen_func.mov = gen_movv2si;
10632 }
5514620a 10633 else if (TARGET_STRING
cabfd258
GK
10634 && bytes > 24 /* move up to 32 bytes at a time */
10635 && ! fixed_regs[5]
10636 && ! fixed_regs[6]
10637 && ! fixed_regs[7]
10638 && ! fixed_regs[8]
10639 && ! fixed_regs[9]
10640 && ! fixed_regs[10]
10641 && ! fixed_regs[11]
10642 && ! fixed_regs[12])
7e69e155 10643 {
cabfd258 10644 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 10645 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
10646 }
10647 else if (TARGET_STRING
10648 && bytes > 16 /* move up to 24 bytes at a time */
10649 && ! fixed_regs[5]
10650 && ! fixed_regs[6]
10651 && ! fixed_regs[7]
10652 && ! fixed_regs[8]
10653 && ! fixed_regs[9]
10654 && ! fixed_regs[10])
10655 {
10656 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 10657 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
10658 }
10659 else if (TARGET_STRING
10660 && bytes > 8 /* move up to 16 bytes at a time */
10661 && ! fixed_regs[5]
10662 && ! fixed_regs[6]
10663 && ! fixed_regs[7]
10664 && ! fixed_regs[8])
10665 {
10666 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 10667 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
10668 }
10669 else if (bytes >= 8 && TARGET_POWERPC64
10670 /* 64-bit loads and stores require word-aligned
10671 displacements. */
fba73eb1 10672 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
10673 {
10674 move_bytes = 8;
10675 mode = DImode;
10676 gen_func.mov = gen_movdi;
10677 }
10678 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
10679 { /* move up to 8 bytes at a time */
10680 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 10681 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 10682 }
cd7d9ca4 10683 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
10684 { /* move 4 bytes */
10685 move_bytes = 4;
10686 mode = SImode;
10687 gen_func.mov = gen_movsi;
10688 }
ec53fc93 10689 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
10690 { /* move 2 bytes */
10691 move_bytes = 2;
10692 mode = HImode;
10693 gen_func.mov = gen_movhi;
10694 }
10695 else if (TARGET_STRING && bytes > 1)
10696 { /* move up to 4 bytes at a time */
10697 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 10698 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
10699 }
10700 else /* move 1 byte at a time */
10701 {
10702 move_bytes = 1;
10703 mode = QImode;
10704 gen_func.mov = gen_movqi;
10705 }
f676971a 10706
cabfd258
GK
10707 src = adjust_address (orig_src, mode, offset);
10708 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
10709
10710 if (mode != BLKmode)
cabfd258
GK
10711 {
10712 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 10713
cabfd258
GK
10714 emit_insn ((*gen_func.mov) (tmp_reg, src));
10715 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 10716 }
3933e0e1 10717
cabfd258
GK
10718 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
10719 {
10720 int i;
10721 for (i = 0; i < num_reg; i++)
10722 emit_insn (stores[i]);
10723 num_reg = 0;
10724 }
35aff10b 10725
cabfd258 10726 if (mode == BLKmode)
7e69e155 10727 {
70128ad9 10728 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
10729 patterns require zero offset. */
10730 if (!REG_P (XEXP (src, 0)))
b6c9286a 10731 {
cabfd258
GK
10732 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
10733 src = replace_equiv_address (src, src_reg);
b6c9286a 10734 }
cabfd258 10735 set_mem_size (src, GEN_INT (move_bytes));
f676971a 10736
cabfd258 10737 if (!REG_P (XEXP (dest, 0)))
3933e0e1 10738 {
cabfd258
GK
10739 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
10740 dest = replace_equiv_address (dest, dest_reg);
7e69e155 10741 }
cabfd258 10742 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 10743
70128ad9 10744 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
10745 GEN_INT (move_bytes & 31),
10746 align_rtx));
7e69e155 10747 }
7e69e155
MM
10748 }
10749
10750 return 1;
10751}
10752
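/* Worked example added for illustration, not from the original source:
   without TARGET_STRING, TARGET_POWERPC64 or TARGET_ALTIVEC, a 10-byte
   copy with 32-bit alignment becomes two SImode copies (offsets 0 and 4)
   and one HImode copy (offset 8); the loads are buffered in stores[] and
   flushed every MAX_MOVE_REG moves or on the final iteration.  */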
d62294f5 10753\f
9caa3eb2
DE
10754/* Return a string to perform a load_multiple operation.
10755 operands[0] is the vector.
10756 operands[1] is the source address.
10757 operands[2] is the first destination register. */
10758
10759const char *
a2369ed3 10760rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
10761{
10762 /* We have to handle the case where the pseudo used to contain the address
10763 is assigned to one of the output registers. */
10764 int i, j;
10765 int words = XVECLEN (operands[0], 0);
10766 rtx xop[10];
10767
10768 if (XVECLEN (operands[0], 0) == 1)
10769 return "{l|lwz} %2,0(%1)";
10770
10771 for (i = 0; i < words; i++)
10772 if (refers_to_regno_p (REGNO (operands[2]) + i,
10773 REGNO (operands[2]) + i + 1, operands[1], 0))
10774 {
10775 if (i == words-1)
10776 {
10777 xop[0] = GEN_INT (4 * (words-1));
10778 xop[1] = operands[1];
10779 xop[2] = operands[2];
10780 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
10781 return "";
10782 }
10783 else if (i == 0)
10784 {
10785 xop[0] = GEN_INT (4 * (words-1));
10786 xop[1] = operands[1];
10787 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
10788 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
10789 return "";
10790 }
10791 else
10792 {
10793 for (j = 0; j < words; j++)
10794 if (j != i)
10795 {
10796 xop[0] = GEN_INT (j * 4);
10797 xop[1] = operands[1];
10798 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
10799 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
10800 }
10801 xop[0] = GEN_INT (i * 4);
10802 xop[1] = operands[1];
10803 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
10804 return "";
10805 }
10806 }
10807
10808 return "{lsi|lswi} %2,%1,%N0";
10809}
10810
9878760c 10811\f
a4f6c312
SS
10812/* A validation routine: say whether CODE, a condition code, and MODE
10813 match. The other alternatives either don't make sense or should
10814 never be generated. */
39a10a29 10815
48d72335 10816void
a2369ed3 10817validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 10818{
37409796
NS
10819 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
10820 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
10821 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
10822
10823 /* These don't make sense. */
37409796
NS
10824 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
10825 || mode != CCUNSmode);
39a10a29 10826
37409796
NS
10827 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
10828 || mode == CCUNSmode);
39a10a29 10829
37409796
NS
10830 gcc_assert (mode == CCFPmode
10831 || (code != ORDERED && code != UNORDERED
10832 && code != UNEQ && code != LTGT
10833 && code != UNGT && code != UNLT
10834 && code != UNGE && code != UNLE));
f676971a
EC
10835
10836 /* These should never be generated except for
bc9ec0e0 10837 flag_finite_math_only. */
37409796
NS
10838 gcc_assert (mode != CCFPmode
10839 || flag_finite_math_only
10840 || (code != LE && code != GE
10841 && code != UNEQ && code != LTGT
10842 && code != UNGT && code != UNLT));
39a10a29
GK
10843
10844 /* These are invalid; the information is not there. */
37409796 10845 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
10846}
10847
9878760c
RK
10848\f
10849/* Return 1 if ANDOP is a mask with no bits set outside the mask required
10850 to convert the result of a rotate insn into a shift
10851 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9878760c
RK
10852
10853int
a2369ed3 10854includes_lshift_p (rtx shiftop, rtx andop)
9878760c 10855{
e2c953b6
DE
10856 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
10857
10858 shift_mask <<= INTVAL (shiftop);
9878760c 10859
b1765bde 10860 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
10861}
10862
10863/* Similar, but for right shift. */
10864
10865int
a2369ed3 10866includes_rshift_p (rtx shiftop, rtx andop)
9878760c 10867{
a7653a2c 10868 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
10869
10870 shift_mask >>= INTVAL (shiftop);
10871
b1765bde 10872 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
10873}
10874
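/* Worked example added for illustration, not from the original source:
   a rotate left by 4 followed by AND differs from a shift left by 4 only
   in the low four bits, so includes_lshift_p accepts ANDOP 0xfffffff0
   for SHIFTOP 4 and rejects 0xffffffff, whose low bits would keep the
   wrapped-around data.  */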
c5059423
AM
10875/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
10876 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 10877 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
10878
10879int
a2369ed3 10880includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 10881{
c5059423
AM
10882 if (GET_CODE (andop) == CONST_INT)
10883 {
02071907 10884 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 10885
c5059423 10886 c = INTVAL (andop);
02071907 10887 if (c == 0 || c == ~0)
c5059423 10888 return 0;
e2c953b6 10889
02071907 10890 shift_mask = ~0;
c5059423
AM
10891 shift_mask <<= INTVAL (shiftop);
10892
b6d08ca1 10893 /* Find the least significant one bit. */
c5059423
AM
10894 lsb = c & -c;
10895
10896 /* It must coincide with the LSB of the shift mask. */
10897 if (-lsb != shift_mask)
10898 return 0;
e2c953b6 10899
c5059423
AM
10900 /* Invert to look for the next transition (if any). */
10901 c = ~c;
10902
10903 /* Remove the low group of ones (originally low group of zeros). */
10904 c &= -lsb;
10905
10906 /* Again find the lsb, and check we have all 1's above. */
10907 lsb = c & -c;
10908 return c == -lsb;
10909 }
10910 else if (GET_CODE (andop) == CONST_DOUBLE
10911 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10912 {
02071907
AM
10913 HOST_WIDE_INT low, high, lsb;
10914 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
10915
10916 low = CONST_DOUBLE_LOW (andop);
10917 if (HOST_BITS_PER_WIDE_INT < 64)
10918 high = CONST_DOUBLE_HIGH (andop);
10919
10920 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 10921 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
10922 return 0;
10923
10924 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10925 {
02071907 10926 shift_mask_high = ~0;
c5059423
AM
10927 if (INTVAL (shiftop) > 32)
10928 shift_mask_high <<= INTVAL (shiftop) - 32;
10929
10930 lsb = high & -high;
10931
10932 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
10933 return 0;
10934
10935 high = ~high;
10936 high &= -lsb;
10937
10938 lsb = high & -high;
10939 return high == -lsb;
10940 }
10941
02071907 10942 shift_mask_low = ~0;
c5059423
AM
10943 shift_mask_low <<= INTVAL (shiftop);
10944
10945 lsb = low & -low;
10946
10947 if (-lsb != shift_mask_low)
10948 return 0;
10949
10950 if (HOST_BITS_PER_WIDE_INT < 64)
10951 high = ~high;
10952 low = ~low;
10953 low &= -lsb;
10954
10955 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10956 {
10957 lsb = high & -high;
10958 return high == -lsb;
10959 }
10960
10961 lsb = low & -low;
10962 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
10963 }
10964 else
10965 return 0;
10966}
e2c953b6 10967
c5059423
AM
10968/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
10969 to perform a left shift. It must have SHIFTOP or more least
c1207243 10970 significant 0's, with the remainder of the word 1's. */
e2c953b6 10971
c5059423 10972int
a2369ed3 10973includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 10974{
e2c953b6 10975 if (GET_CODE (andop) == CONST_INT)
c5059423 10976 {
02071907 10977 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 10978
02071907 10979 shift_mask = ~0;
c5059423
AM
10980 shift_mask <<= INTVAL (shiftop);
10981 c = INTVAL (andop);
10982
c1207243 10983 /* Find the least significant one bit. */
c5059423
AM
10984 lsb = c & -c;
10985
10986 /* It must be covered by the shift mask.
a4f6c312 10987 This test also rejects c == 0. */
c5059423
AM
10988 if ((lsb & shift_mask) == 0)
10989 return 0;
10990
10991 /* Check we have all 1's above the transition, and reject all 1's. */
10992 return c == -lsb && lsb != 1;
10993 }
10994 else if (GET_CODE (andop) == CONST_DOUBLE
10995 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10996 {
02071907 10997 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
10998
10999 low = CONST_DOUBLE_LOW (andop);
11000
11001 if (HOST_BITS_PER_WIDE_INT < 64)
11002 {
02071907 11003 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
11004
11005 high = CONST_DOUBLE_HIGH (andop);
11006
11007 if (low == 0)
11008 {
02071907 11009 shift_mask_high = ~0;
c5059423
AM
11010 if (INTVAL (shiftop) > 32)
11011 shift_mask_high <<= INTVAL (shiftop) - 32;
11012
11013 lsb = high & -high;
11014
11015 if ((lsb & shift_mask_high) == 0)
11016 return 0;
11017
11018 return high == -lsb;
11019 }
11020 if (high != ~0)
11021 return 0;
11022 }
11023
02071907 11024 shift_mask_low = ~0;
c5059423
AM
11025 shift_mask_low <<= INTVAL (shiftop);
11026
11027 lsb = low & -low;
11028
11029 if ((lsb & shift_mask_low) == 0)
11030 return 0;
11031
11032 return low == -lsb && lsb != 1;
11033 }
e2c953b6 11034 else
c5059423 11035 return 0;
9878760c 11036}
35068b43 11037
11ac38b2
DE
11038/* Return 1 if the operands will generate valid arguments to an rlwimi
11039instruction for an insert with right shift in 64-bit mode.  The mask may
11040not start on the first bit or stop on the last bit, because the wrap-around
11041effects of the instruction do not correspond to the semantics of the RTL insn. */
11042
11043int
11044insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
11045{
429ec7dc
DE
11046 if (INTVAL (startop) > 32
11047 && INTVAL (startop) < 64
11048 && INTVAL (sizeop) > 1
11049 && INTVAL (sizeop) + INTVAL (startop) < 64
11050 && INTVAL (shiftop) > 0
11051 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
11052 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
11053 return 1;
11054
11055 return 0;
11056}
11057
35068b43 11058/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 11059 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
11060
11061int
a2369ed3 11062registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
11063{
11064 /* We might have been passed a SUBREG. */
f676971a 11065 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 11066 return 0;
f676971a 11067
90f81f99
AP
11068 /* We might have been passed non-floating-point registers. */
11069 if (!FP_REGNO_P (REGNO (reg1))
11070 || !FP_REGNO_P (REGNO (reg2)))
11071 return 0;
35068b43
RK
11072
11073 return (REGNO (reg1) == REGNO (reg2) - 1);
11074}
11075
a4f6c312
SS
11076/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
11077 addr1 and addr2 must be in consecutive memory locations
11078 (addr2 == addr1 + 8). */
35068b43
RK
11079
11080int
90f81f99 11081mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 11082{
90f81f99 11083 rtx addr1, addr2;
bb8df8a6
EC
11084 unsigned int reg1, reg2;
11085 int offset1, offset2;
35068b43 11086
90f81f99
AP
11087 /* The mems cannot be volatile. */
11088 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
11089 return 0;
f676971a 11090
90f81f99
AP
11091 addr1 = XEXP (mem1, 0);
11092 addr2 = XEXP (mem2, 0);
11093
35068b43
RK
11094 /* Extract an offset (if used) from the first addr. */
11095 if (GET_CODE (addr1) == PLUS)
11096 {
11097 /* If not a REG, return zero. */
11098 if (GET_CODE (XEXP (addr1, 0)) != REG)
11099 return 0;
11100 else
11101 {
c4ad648e 11102 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
11103 /* The offset must be constant! */
11104 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
11105 return 0;
11106 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
11107 }
11108 }
11109 else if (GET_CODE (addr1) != REG)
11110 return 0;
11111 else
11112 {
11113 reg1 = REGNO (addr1);
11114 /* This was a simple (mem (reg)) expression. Offset is 0. */
11115 offset1 = 0;
11116 }
11117
bb8df8a6
EC
11118 /* And now for the second addr. */
11119 if (GET_CODE (addr2) == PLUS)
11120 {
11121 /* If not a REG, return zero. */
11122 if (GET_CODE (XEXP (addr2, 0)) != REG)
11123 return 0;
11124 else
11125 {
11126 reg2 = REGNO (XEXP (addr2, 0));
11127 /* The offset must be constant. */
11128 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11129 return 0;
11130 offset2 = INTVAL (XEXP (addr2, 1));
11131 }
11132 }
11133 else if (GET_CODE (addr2) != REG)
35068b43 11134 return 0;
bb8df8a6
EC
11135 else
11136 {
11137 reg2 = REGNO (addr2);
11138 /* This was a simple (mem (reg)) expression. Offset is 0. */
11139 offset2 = 0;
11140 }
35068b43 11141
bb8df8a6
EC
11142 /* Both of these must have the same base register. */
11143 if (reg1 != reg2)
35068b43
RK
11144 return 0;
11145
11146 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 11147 if (offset2 != offset1 + 8)
35068b43
RK
11148 return 0;
11149
11150 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11151 instructions. */
11152 return 1;
11153}
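/* Worked example added for illustration, not from the original source:
   (mem (plus (reg 3) (const_int 8))) followed by
   (mem (plus (reg 3) (const_int 16))) qualifies -- same base register,
   second offset exactly 8 larger, and neither MEM volatile.  */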
9878760c 11154\f
e41b2a33
PB
11155
11156rtx
11157rs6000_secondary_memory_needed_rtx (enum machine_mode mode)
11158{
11159 static bool eliminated = false;
11160 if (mode != SDmode)
11161 return assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
11162 else
11163 {
11164 rtx mem = cfun->machine->sdmode_stack_slot;
11165 gcc_assert (mem != NULL_RTX);
11166
11167 if (!eliminated)
11168 {
11169 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
11170 cfun->machine->sdmode_stack_slot = mem;
11171 eliminated = true;
11172 }
11173 return mem;
11174 }
11175}
11176
11177static tree
11178rs6000_check_sdmode (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11179{
11180 /* Don't walk into types. */
11181 if (*tp == NULL_TREE || *tp == error_mark_node || TYPE_P (*tp))
11182 {
11183 *walk_subtrees = 0;
11184 return NULL_TREE;
11185 }
11186
11187 switch (TREE_CODE (*tp))
11188 {
11189 case VAR_DECL:
11190 case PARM_DECL:
11191 case FIELD_DECL:
11192 case RESULT_DECL:
11193 case REAL_CST:
11194 if (TYPE_MODE (TREE_TYPE (*tp)) == SDmode)
11195 return *tp;
11196 break;
11197 default:
11198 break;
11199 }
11200
11201 return NULL_TREE;
11202}
11203
11204
11205/* Allocate a 64-bit stack slot to be used for copying SDmode
11206 values through if this function has any SDmode references. */
11207
11208static void
11209rs6000_alloc_sdmode_stack_slot (void)
11210{
11211 tree t;
11212 basic_block bb;
11213 block_stmt_iterator bsi;
11214
11215 gcc_assert (cfun->machine->sdmode_stack_slot == NULL_RTX);
11216
11217 FOR_EACH_BB (bb)
11218 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
11219 {
11220 tree ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
11221 rs6000_check_sdmode, NULL);
11222 if (ret)
11223 {
11224 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11225 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11226 SDmode, 0);
11227 return;
11228 }
11229 }
11230
11231 /* Check for any SDmode parameters of the function. */
11232 for (t = DECL_ARGUMENTS (cfun->decl); t; t = TREE_CHAIN (t))
11233 {
11234 if (TREE_TYPE (t) == error_mark_node)
11235 continue;
11236
11237 if (TYPE_MODE (TREE_TYPE (t)) == SDmode
11238 || TYPE_MODE (DECL_ARG_TYPE (t)) == SDmode)
11239 {
11240 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11241 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11242 SDmode, 0);
11243 return;
11244 }
11245 }
11246}
11247
11248static void
11249rs6000_instantiate_decls (void)
11250{
11251 if (cfun->machine->sdmode_stack_slot != NULL_RTX)
11252 instantiate_decl_rtl (cfun->machine->sdmode_stack_slot);
11253}
11254
9878760c
RK
11255/* Return the register class of a scratch register needed to copy IN into
11256 or out of a register in CLASS in MODE. If it can be done directly,
11257 NO_REGS is returned. */
11258
11259enum reg_class
3c4774e0
R
11260rs6000_secondary_reload_class (enum reg_class class,
11261 enum machine_mode mode ATTRIBUTE_UNUSED,
11262 rtx in)
9878760c 11263{
5accd822 11264 int regno;
9878760c 11265
ab82a49f
AP
11266 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
11267#if TARGET_MACHO
c4ad648e 11268 && MACHOPIC_INDIRECT
ab82a49f 11269#endif
c4ad648e 11270 ))
46fad5b7
DJ
11271 {
11272 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
11273 other than BASE_REGS for TARGET_ELF. So indicate that a
11274 register from BASE_REGS is needed as an intermediate
11275 register.
f676971a 11276
46fad5b7
DJ
11277 On Darwin, pic addresses require a load from memory, which
11278 needs a base register. */
11279 if (class != BASE_REGS
c4ad648e
AM
11280 && (GET_CODE (in) == SYMBOL_REF
11281 || GET_CODE (in) == HIGH
11282 || GET_CODE (in) == LABEL_REF
11283 || GET_CODE (in) == CONST))
11284 return BASE_REGS;
46fad5b7 11285 }
e7b7998a 11286
5accd822
DE
11287 if (GET_CODE (in) == REG)
11288 {
11289 regno = REGNO (in);
11290 if (regno >= FIRST_PSEUDO_REGISTER)
11291 {
11292 regno = true_regnum (in);
11293 if (regno >= FIRST_PSEUDO_REGISTER)
11294 regno = -1;
11295 }
11296 }
11297 else if (GET_CODE (in) == SUBREG)
11298 {
11299 regno = true_regnum (in);
11300 if (regno >= FIRST_PSEUDO_REGISTER)
11301 regno = -1;
11302 }
11303 else
11304 regno = -1;
11305
9878760c
RK
11306 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
11307 into anything. */
11308 if (class == GENERAL_REGS || class == BASE_REGS
11309 || (regno >= 0 && INT_REGNO_P (regno)))
11310 return NO_REGS;
11311
11312 /* Constants, memory, and FP registers can go into FP registers. */
11313 if ((regno == -1 || FP_REGNO_P (regno))
11314 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
e41b2a33 11315 return (mode != SDmode) ? NO_REGS : GENERAL_REGS;
9878760c 11316
0ac081f6
AH
11317 /* Memory, and AltiVec registers can go into AltiVec registers. */
11318 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
11319 && class == ALTIVEC_REGS)
11320 return NO_REGS;
11321
9878760c
RK
11322 /* We can copy among the CR registers. */
11323 if ((class == CR_REGS || class == CR0_REGS)
11324 && regno >= 0 && CR_REGNO_P (regno))
11325 return NO_REGS;
11326
11327 /* Otherwise, we need GENERAL_REGS. */
11328 return GENERAL_REGS;
11329}
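/* Worked example added for illustration, not from the original source:
   copying a constant or a value from memory (regno == -1) into
   FLOAT_REGS needs no scratch register, so NO_REGS is returned, except
   for SDmode, which must be bounced through GENERAL_REGS.  */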
11330\f
11331/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 11332 know this is a valid comparison.
9878760c
RK
11333
11334 SCC_P is 1 if this is for an scc. That means that %D will have been
11335 used instead of %C, so the bits will be in different places.
11336
b4ac57ab 11337 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
11338
11339int
a2369ed3 11340ccr_bit (rtx op, int scc_p)
9878760c
RK
11341{
11342 enum rtx_code code = GET_CODE (op);
11343 enum machine_mode cc_mode;
11344 int cc_regnum;
11345 int base_bit;
9ebbca7d 11346 rtx reg;
9878760c 11347
ec8e098d 11348 if (!COMPARISON_P (op))
9878760c
RK
11349 return -1;
11350
9ebbca7d
GK
11351 reg = XEXP (op, 0);
11352
37409796 11353 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
11354
11355 cc_mode = GET_MODE (reg);
11356 cc_regnum = REGNO (reg);
11357 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 11358
39a10a29 11359 validate_condition_mode (code, cc_mode);
c5defebb 11360
b7053a3f
GK
11361 /* When generating a sCOND operation, only positive conditions are
11362 allowed. */
37409796
NS
11363 gcc_assert (!scc_p
11364 || code == EQ || code == GT || code == LT || code == UNORDERED
11365 || code == GTU || code == LTU);
f676971a 11366
9878760c
RK
11367 switch (code)
11368 {
11369 case NE:
11370 return scc_p ? base_bit + 3 : base_bit + 2;
11371 case EQ:
11372 return base_bit + 2;
1c882ea4 11373 case GT: case GTU: case UNLE:
9878760c 11374 return base_bit + 1;
1c882ea4 11375 case LT: case LTU: case UNGE:
9878760c 11376 return base_bit;
1c882ea4
GK
11377 case ORDERED: case UNORDERED:
11378 return base_bit + 3;
9878760c
RK
11379
11380 case GE: case GEU:
39a10a29 11381 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
11382 unordered position. So test that bit. For integer, this is ! LT
11383 unless this is an scc insn. */
39a10a29 11384 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
11385
11386 case LE: case LEU:
39a10a29 11387 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 11388
9878760c 11389 default:
37409796 11390 gcc_unreachable ();
9878760c
RK
11391 }
11392}
1ff7789b 11393\f
8d30c4ee 11394/* Return the GOT register. */
1ff7789b 11395
9390387d 11396rtx
a2369ed3 11397rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 11398{
a4f6c312
SS
11399 /* The second flow pass currently (June 1999) can't update
11400 regs_ever_live without disturbing other parts of the compiler, so
11401 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
11402 if (!can_create_pseudo_p ()
11403 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 11404 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 11405
8d30c4ee 11406 current_function_uses_pic_offset_table = 1;
3cb999d8 11407
1ff7789b
MM
11408 return pic_offset_table_rtx;
11409}
a7df97e6 11410\f
e2500fed
GK
11411/* Function to init struct machine_function.
11412 This will be called, via a pointer variable,
11413 from push_function_context. */
a7df97e6 11414
e2500fed 11415static struct machine_function *
863d938c 11416rs6000_init_machine_status (void)
a7df97e6 11417{
e2500fed 11418 return ggc_alloc_cleared (sizeof (machine_function));
a7df97e6 11419}
9878760c 11420\f
0ba1b2ff
AM
11421/* These macros test for integers and extract the low-order bits. */
11422#define INT_P(X) \
11423((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11424 && GET_MODE (X) == VOIDmode)
11425
11426#define INT_LOWPART(X) \
11427 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11428
11429int
a2369ed3 11430extract_MB (rtx op)
0ba1b2ff
AM
11431{
11432 int i;
11433 unsigned long val = INT_LOWPART (op);
11434
11435 /* If the high bit is zero, the value is the first 1 bit we find
11436 from the left. */
11437 if ((val & 0x80000000) == 0)
11438 {
37409796 11439 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11440
11441 i = 1;
11442 while (((val <<= 1) & 0x80000000) == 0)
11443 ++i;
11444 return i;
11445 }
11446
11447 /* If the high bit is set and the low bit is not, or the mask is all
11448 1's, the value is zero. */
11449 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11450 return 0;
11451
11452 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11453 from the right. */
11454 i = 31;
11455 while (((val >>= 1) & 1) != 0)
11456 --i;
11457
11458 return i;
11459}
11460
11461int
a2369ed3 11462extract_ME (rtx op)
0ba1b2ff
AM
11463{
11464 int i;
11465 unsigned long val = INT_LOWPART (op);
11466
11467 /* If the low bit is zero, the value is the first 1 bit we find from
11468 the right. */
11469 if ((val & 1) == 0)
11470 {
37409796 11471 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11472
11473 i = 30;
11474 while (((val >>= 1) & 1) == 0)
11475 --i;
11476
11477 return i;
11478 }
11479
11480 /* If the low bit is set and the high bit is not, or the mask is all
11481 1's, the value is 31. */
11482 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11483 return 31;
11484
11485 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11486 from the left. */
11487 i = 0;
11488 while (((val <<= 1) & 0x80000000) != 0)
11489 ++i;
11490
11491 return i;
11492}
11493
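/* Worked example added for illustration, not from the original source:
   the 32-bit mask 0x0fff0000 has its 1 bits in positions 4..15 counting
   from the most significant end, so extract_MB returns 4 and extract_ME
   returns 15, matching the MB/ME fields of the rlwinm encoding of that
   mask.  */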
c4501e62
JJ
11494/* Locate some local-dynamic symbol still in use by this function
11495 so that we can print its name in some tls_ld pattern. */
11496
11497static const char *
863d938c 11498rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
11499{
11500 rtx insn;
11501
11502 if (cfun->machine->some_ld_name)
11503 return cfun->machine->some_ld_name;
11504
11505 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11506 if (INSN_P (insn)
11507 && for_each_rtx (&PATTERN (insn),
11508 rs6000_get_some_local_dynamic_name_1, 0))
11509 return cfun->machine->some_ld_name;
11510
37409796 11511 gcc_unreachable ();
c4501e62
JJ
11512}
11513
11514/* Helper function for rs6000_get_some_local_dynamic_name. */
11515
11516static int
a2369ed3 11517rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
11518{
11519 rtx x = *px;
11520
11521 if (GET_CODE (x) == SYMBOL_REF)
11522 {
11523 const char *str = XSTR (x, 0);
11524 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11525 {
11526 cfun->machine->some_ld_name = str;
11527 return 1;
11528 }
11529 }
11530
11531 return 0;
11532}
11533
85b776df
AM
11534/* Write out a function code label. */
11535
11536void
11537rs6000_output_function_entry (FILE *file, const char *fname)
11538{
11539 if (fname[0] != '.')
11540 {
11541 switch (DEFAULT_ABI)
11542 {
11543 default:
37409796 11544 gcc_unreachable ();
85b776df
AM
11545
11546 case ABI_AIX:
11547 if (DOT_SYMBOLS)
11548 putc ('.', file);
11549 else
11550 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11551 break;
11552
11553 case ABI_V4:
11554 case ABI_DARWIN:
11555 break;
11556 }
11557 }
11558 if (TARGET_AIX)
11559 RS6000_OUTPUT_BASENAME (file, fname);
11560 else
11561 assemble_name (file, fname);
11562}
11563
9878760c
RK
11564/* Print an operand. Recognize special options, documented below. */
11565
38c1f2d7 11566#if TARGET_ELF
d9407988 11567#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11568#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
11569#else
11570#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11571#define SMALL_DATA_REG 0
ba5e43aa
MM
11572#endif
11573
9878760c 11574void
a2369ed3 11575print_operand (FILE *file, rtx x, int code)
9878760c
RK
11576{
11577 int i;
a260abc9 11578 HOST_WIDE_INT val;
0ba1b2ff 11579 unsigned HOST_WIDE_INT uval;
9878760c
RK
11580
11581 switch (code)
11582 {
a8b3aeda 11583 case '.':
a85d226b
RK
11584 /* Write out an instruction after the call which may be replaced
11585 with glue code by the loader. This depends on the AIX version. */
11586 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
11587 return;
11588
81eace42
GK
11589 /* %a is output_address. */
11590
9854d9ed
RK
11591 case 'A':
11592 /* If X is a constant integer whose low-order 5 bits are zero,
11593 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11594 in the AIX assembler where "sri" with a zero shift count
20e26713 11595 writes a trash instruction. */
9854d9ed 11596 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11597 putc ('l', file);
9854d9ed 11598 else
76229ac8 11599 putc ('r', file);
9854d9ed
RK
11600 return;
11601
11602 case 'b':
e2c953b6
DE
11603 /* If constant, low-order 16 bits of constant, unsigned.
11604 Otherwise, write normally. */
11605 if (INT_P (x))
11606 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11607 else
11608 print_operand (file, x, 0);
cad12a8d
RK
11609 return;
11610
a260abc9
DE
11611 case 'B':
11612 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11613 for 64-bit mask direction. */
9390387d 11614 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11615 return;
a260abc9 11616
81eace42
GK
11617 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11618 output_operand. */
11619
423c1189
AH
11620 case 'c':
11621 /* X is a CR register. Print the number of the GT bit of the CR. */
11622 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11623 output_operand_lossage ("invalid %%E value");
11624 else
11625 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11626 return;
11627
11628 case 'D':
cef6b86c 11629 /* Like 'J' but get to the GT bit only. */
37409796 11630 gcc_assert (GET_CODE (x) == REG);
423c1189 11631
cef6b86c
EB
11632 /* Bit 1 is GT bit. */
11633 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 11634
cef6b86c
EB
11635 /* Add one for shift count in rlinm for scc. */
11636 fprintf (file, "%d", i + 1);
423c1189
AH
11637 return;
11638
9854d9ed 11639 case 'E':
39a10a29 11640 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
11641 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11642 output_operand_lossage ("invalid %%E value");
78fbdbf7 11643 else
39a10a29 11644 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 11645 return;
9854d9ed
RK
11646
11647 case 'f':
11648 /* X is a CR register. Print the shift count needed to move it
11649 to the high-order four bits. */
11650 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11651 output_operand_lossage ("invalid %%f value");
11652 else
9ebbca7d 11653 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11654 return;
11655
11656 case 'F':
11657 /* Similar, but print the count for the rotate in the opposite
11658 direction. */
11659 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11660 output_operand_lossage ("invalid %%F value");
11661 else
9ebbca7d 11662 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11663 return;
11664
11665 case 'G':
11666 /* X is a constant integer. If it is negative, print "m",
43aa4e05 11667 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
11668 if (GET_CODE (x) != CONST_INT)
11669 output_operand_lossage ("invalid %%G value");
11670 else if (INTVAL (x) >= 0)
76229ac8 11671 putc ('z', file);
9854d9ed 11672 else
76229ac8 11673 putc ('m', file);
9854d9ed 11674 return;
e2c953b6 11675
9878760c 11676 case 'h':
a4f6c312
SS
11677 /* If constant, output low-order five bits. Otherwise, write
11678 normally. */
9878760c 11679 if (INT_P (x))
5f59ecb7 11680 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
11681 else
11682 print_operand (file, x, 0);
11683 return;
11684
64305719 11685 case 'H':
a4f6c312
SS
11686 /* If constant, output low-order six bits. Otherwise, write
11687 normally. */
64305719 11688 if (INT_P (x))
5f59ecb7 11689 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
11690 else
11691 print_operand (file, x, 0);
11692 return;
11693
9854d9ed
RK
11694 case 'I':
11695 /* Print `i' if this is a constant, else nothing. */
9878760c 11696 if (INT_P (x))
76229ac8 11697 putc ('i', file);
9878760c
RK
11698 return;
11699
9854d9ed
RK
11700 case 'j':
11701 /* Write the bit number in CCR for jump. */
11702 i = ccr_bit (x, 0);
11703 if (i == -1)
11704 output_operand_lossage ("invalid %%j code");
9878760c 11705 else
9854d9ed 11706 fprintf (file, "%d", i);
9878760c
RK
11707 return;
11708
9854d9ed
RK
11709 case 'J':
11710 /* Similar, but add one for shift count in rlinm for scc and pass
11711 scc flag to `ccr_bit'. */
11712 i = ccr_bit (x, 1);
11713 if (i == -1)
11714 output_operand_lossage ("invalid %%J code");
11715 else
a0466a68
RK
11716 /* If we want bit 31, write a shift count of zero, not 32. */
11717 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
11718 return;
11719
9854d9ed
RK
11720 case 'k':
11721 /* X must be a constant. Write the 1's complement of the
11722 constant. */
9878760c 11723 if (! INT_P (x))
9854d9ed 11724 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
11725 else
11726 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
11727 return;
11728
81eace42 11729 case 'K':
9ebbca7d
GK
11730 /* X must be a symbolic constant on ELF. Write an
11731 expression suitable for an 'addi' that adds in the low 16
11732 bits of the MEM. */
11733 if (GET_CODE (x) != CONST)
11734 {
11735 print_operand_address (file, x);
11736 fputs ("@l", file);
11737 }
11738 else
11739 {
11740 if (GET_CODE (XEXP (x, 0)) != PLUS
11741 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
11742 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
11743 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 11744 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
11745 print_operand_address (file, XEXP (XEXP (x, 0), 0));
11746 fputs ("@l", file);
ed8d2920
MM
11747 /* For GNU as, there must be a non-alphanumeric character
11748 between 'l' and the number. The '-' is added by
11749 print_operand() already. */
11750 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
11751 fputs ("+", file);
9ebbca7d
GK
11752 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
11753 }
81eace42
GK
11754 return;
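      /* For example (using a hypothetical symbol name), a bare
         (symbol_ref "sym") prints as "sym@l", while
         (const (plus (symbol_ref "sym") (const_int 8))) prints as "sym@l+8".  */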
11755
11756 /* %l is output_asm_label. */
9ebbca7d 11757
9854d9ed
RK
11758 case 'L':
11759 /* Write second word of DImode or DFmode reference. Works on register
11760 or non-indexed memory only. */
11761 if (GET_CODE (x) == REG)
fb5c67a7 11762 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
11763 else if (GET_CODE (x) == MEM)
11764 {
11765 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 11766 we have already done it, we can just use an offset of word. */
9854d9ed
RK
11767 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11768 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
11769 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11770 UNITS_PER_WORD));
6fb5fa3c
DB
11771 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11772 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11773 UNITS_PER_WORD));
9854d9ed 11774 else
d7624dc0
RK
11775 output_address (XEXP (adjust_address_nv (x, SImode,
11776 UNITS_PER_WORD),
11777 0));
ed8908e7 11778
ba5e43aa 11779 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11780 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11781 reg_names[SMALL_DATA_REG]);
9854d9ed 11782 }
9878760c 11783 return;
f676971a 11784
9878760c
RK
11785 case 'm':
11786 /* MB value for a mask operand. */
b1765bde 11787 if (! mask_operand (x, SImode))
9878760c
RK
11788 output_operand_lossage ("invalid %%m value");
11789
0ba1b2ff 11790 fprintf (file, "%d", extract_MB (x));
9878760c
RK
11791 return;
11792
11793 case 'M':
11794 /* ME value for a mask operand. */
b1765bde 11795 if (! mask_operand (x, SImode))
a260abc9 11796 output_operand_lossage ("invalid %%M value");
9878760c 11797
0ba1b2ff 11798 fprintf (file, "%d", extract_ME (x));
9878760c
RK
11799 return;
11800
81eace42
GK
11801 /* %n outputs the negative of its operand. */
11802
9878760c
RK
11803 case 'N':
11804 /* Write the number of elements in the vector times 4. */
11805 if (GET_CODE (x) != PARALLEL)
11806 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
11807 else
11808 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
11809 return;
11810
11811 case 'O':
11812 /* Similar, but subtract 1 first. */
11813 if (GET_CODE (x) != PARALLEL)
1427100a 11814 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
11815 else
11816 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
11817 return;
11818
9854d9ed
RK
11819 case 'p':
11820 /* X is a CONST_INT that is a power of two. Output the logarithm. */
11821 if (! INT_P (x)
2bfcf297 11822 || INT_LOWPART (x) < 0
9854d9ed
RK
11823 || (i = exact_log2 (INT_LOWPART (x))) < 0)
11824 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
11825 else
11826 fprintf (file, "%d", i);
9854d9ed
RK
11827 return;
11828
9878760c
RK
11829 case 'P':
11830 /* The operand must be an indirect memory reference. The result
8bb418a3 11831 is the register name. */
9878760c
RK
11832 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
11833 || REGNO (XEXP (x, 0)) >= 32)
11834 output_operand_lossage ("invalid %%P value");
e2c953b6 11835 else
fb5c67a7 11836 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
11837 return;
11838
dfbdccdb
GK
11839 case 'q':
11840 /* This outputs the logical code corresponding to a boolean
11841 expression. The expression may have one or both operands
39a10a29 11842 negated (if one, only the first one). For condition register
c4ad648e
AM
11843 logical operations, it will also treat the negated
11844 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 11845 {
63bc1d05 11846 const char *const *t = 0;
dfbdccdb
GK
11847 const char *s;
11848 enum rtx_code code = GET_CODE (x);
11849 static const char * const tbl[3][3] = {
11850 { "and", "andc", "nor" },
11851 { "or", "orc", "nand" },
11852 { "xor", "eqv", "xor" } };
11853
11854 if (code == AND)
11855 t = tbl[0];
11856 else if (code == IOR)
11857 t = tbl[1];
11858 else if (code == XOR)
11859 t = tbl[2];
11860 else
11861 output_operand_lossage ("invalid %%q value");
11862
11863 if (GET_CODE (XEXP (x, 0)) != NOT)
11864 s = t[0];
11865 else
11866 {
11867 if (GET_CODE (XEXP (x, 1)) == NOT)
11868 s = t[2];
11869 else
11870 s = t[1];
11871 }
f676971a 11872
dfbdccdb
GK
11873 fputs (s, file);
11874 }
11875 return;
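      /* Examples of the mapping above: (and a b) prints "and",
         (and (not a) b) prints "andc" and (and (not a) (not b)) prints "nor";
         likewise (ior (not a) (not b)) prints "nand", (xor (not a) b) prints
         "eqv", and (xor (not a) (not b)) prints plain "xor" again.  */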
11876
2c4a9cff
DE
11877 case 'Q':
11878 if (TARGET_MFCRF)
3b6ce0af 11879 fputc (',', file);
5efb1046 11880 /* FALLTHRU */
2c4a9cff
DE
11881 else
11882 return;
11883
9854d9ed
RK
11884 case 'R':
11885 /* X is a CR register. Print the mask for `mtcrf'. */
11886 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11887 output_operand_lossage ("invalid %%R value");
11888 else
9ebbca7d 11889 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 11890 return;
9854d9ed
RK
11891
11892 case 's':
11893 /* Low 5 bits of 32 - value */
11894 if (! INT_P (x))
11895 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
11896 else
11897 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 11898 return;
9854d9ed 11899
a260abc9 11900 case 'S':
0ba1b2ff 11901 /* PowerPC64 mask position. A mask of all zeros is excluded.
a260abc9
DE
11902 CONST_INT 32-bit mask is considered sign-extended so any
11903 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 11904 if (! mask64_operand (x, DImode))
a260abc9
DE
11905 output_operand_lossage ("invalid %%S value");
11906
0ba1b2ff 11907 uval = INT_LOWPART (x);
a260abc9 11908
0ba1b2ff 11909 if (uval & 1) /* Clear Left */
a260abc9 11910 {
f099d360
GK
11911#if HOST_BITS_PER_WIDE_INT > 64
11912 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11913#endif
0ba1b2ff 11914 i = 64;
a260abc9 11915 }
0ba1b2ff 11916 else /* Clear Right */
a260abc9 11917 {
0ba1b2ff 11918 uval = ~uval;
f099d360
GK
11919#if HOST_BITS_PER_WIDE_INT > 64
11920 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11921#endif
0ba1b2ff 11922 i = 63;
a260abc9 11923 }
0ba1b2ff
AM
11924 while (uval != 0)
11925 --i, uval >>= 1;
37409796 11926 gcc_assert (i >= 0);
0ba1b2ff
AM
11927 fprintf (file, "%d", i);
11928 return;
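      /* Worked example: for the clear-left mask 0x00000000ffffffff (low 32
         bits set) the loop above leaves i == 32, and for the clear-right
         mask 0xffffffff00000000 it leaves i == 31.  */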
a260abc9 11929
a3170dc6
AH
11930 case 't':
11931 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 11932 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
11933
11934 /* Bit 3 is OV bit. */
11935 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
11936
11937 /* If we want bit 31, write a shift count of zero, not 32. */
11938 fprintf (file, "%d", i == 31 ? 0 : i + 1);
11939 return;
11940
cccf3bdc
DE
11941 case 'T':
11942 /* Print the symbolic name of a branch target register. */
1de43f85
DE
11943 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
11944 && REGNO (x) != CTR_REGNO))
cccf3bdc 11945 output_operand_lossage ("invalid %%T value");
1de43f85 11946 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
11947 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
11948 else
11949 fputs ("ctr", file);
11950 return;
11951
9854d9ed 11952 case 'u':
802a0058 11953 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
11954 if (! INT_P (x))
11955 output_operand_lossage ("invalid %%u value");
e2c953b6 11956 else
f676971a 11957 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 11958 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
11959 return;
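      /* For example, (const_int 0x12345678) prints as 0x1234.  */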
11960
802a0058
MM
11961 case 'v':
11962 /* High-order 16 bits of constant for use in signed operand. */
11963 if (! INT_P (x))
11964 output_operand_lossage ("invalid %%v value");
e2c953b6 11965 else
134c32f6
DE
11966 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
11967 (INT_LOWPART (x) >> 16) & 0xffff);
11968 return;
802a0058 11969
9854d9ed
RK
11970 case 'U':
11971 /* Print `u' if this has an auto-increment or auto-decrement. */
11972 if (GET_CODE (x) == MEM
11973 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
11974 || GET_CODE (XEXP (x, 0)) == PRE_DEC
11975 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 11976 putc ('u', file);
9854d9ed 11977 return;
9878760c 11978
e0cd0770
JC
11979 case 'V':
11980 /* Print the trap code for this operand. */
11981 switch (GET_CODE (x))
11982 {
11983 case EQ:
11984 fputs ("eq", file); /* 4 */
11985 break;
11986 case NE:
11987 fputs ("ne", file); /* 24 */
11988 break;
11989 case LT:
11990 fputs ("lt", file); /* 16 */
11991 break;
11992 case LE:
11993 fputs ("le", file); /* 20 */
11994 break;
11995 case GT:
11996 fputs ("gt", file); /* 8 */
11997 break;
11998 case GE:
11999 fputs ("ge", file); /* 12 */
12000 break;
12001 case LTU:
12002 fputs ("llt", file); /* 2 */
12003 break;
12004 case LEU:
12005 fputs ("lle", file); /* 6 */
12006 break;
12007 case GTU:
12008 fputs ("lgt", file); /* 1 */
12009 break;
12010 case GEU:
12011 fputs ("lge", file); /* 5 */
12012 break;
12013 default:
37409796 12014 gcc_unreachable ();
e0cd0770
JC
12015 }
12016 break;
12017
9854d9ed
RK
12018 case 'w':
12019 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
12020 normally. */
12021 if (INT_P (x))
f676971a 12022 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 12023 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
12024 else
12025 print_operand (file, x, 0);
9878760c
RK
12026 return;
12027
9854d9ed 12028 case 'W':
e2c953b6 12029 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
12030 val = (GET_CODE (x) == CONST_INT
12031 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
12032
12033 if (val < 0)
12034 i = -1;
9854d9ed 12035 else
e2c953b6
DE
12036 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
12037 if ((val <<= 1) < 0)
12038 break;
12039
12040#if HOST_BITS_PER_WIDE_INT == 32
12041 if (GET_CODE (x) == CONST_INT && i >= 0)
12042 i += 32; /* zero-extend high-part was all 0's */
12043 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
12044 {
12045 val = CONST_DOUBLE_LOW (x);
12046
37409796
NS
12047 gcc_assert (val);
12048 if (val < 0)
e2c953b6
DE
12049 --i;
12050 else
12051 for ( ; i < 64; i++)
12052 if ((val <<= 1) < 0)
12053 break;
12054 }
12055#endif
12056
12057 fprintf (file, "%d", i + 1);
9854d9ed 12058 return;
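      /* In effect %W prints the number of leading zero bits in the constant,
         which is the MB value rldic wants: a value whose highest set bit is
         bit 60 (three leading zeros) prints 3, and any negative value
         prints 0.  */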
9878760c 12059
9854d9ed
RK
12060 case 'X':
12061 if (GET_CODE (x) == MEM
6fb5fa3c
DB
12062 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
12063 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
12064 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 12065 putc ('x', file);
9854d9ed 12066 return;
9878760c 12067
9854d9ed
RK
12068 case 'Y':
12069 /* Like 'L', for third word of TImode. */
12070 if (GET_CODE (x) == REG)
fb5c67a7 12071 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 12072 else if (GET_CODE (x) == MEM)
9878760c 12073 {
9854d9ed
RK
12074 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12075 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12076 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
12077 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12078 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 12079 else
d7624dc0 12080 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 12081 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12082 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12083 reg_names[SMALL_DATA_REG]);
9878760c
RK
12084 }
12085 return;
f676971a 12086
9878760c 12087 case 'z':
b4ac57ab
RS
12088 /* X is a SYMBOL_REF. Write out the name preceded by a
12089 period and without any trailing data in brackets. Used for function
4d30c363
MM
12090 names. If we are configured for System V (or the embedded ABI) on
12091 the PowerPC, do not emit the period, since those systems do not use
12092 TOCs and the like. */
37409796 12093 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 12094
c4ad648e
AM
12095 /* Mark the decl as referenced so that cgraph will output the
12096 function. */
9bf6462a 12097 if (SYMBOL_REF_DECL (x))
c4ad648e 12098 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 12099
85b776df 12100 /* For macho, check to see if we need a stub. */
f9da97f0
AP
12101 if (TARGET_MACHO)
12102 {
12103 const char *name = XSTR (x, 0);
a031e781 12104#if TARGET_MACHO
3b48085e 12105 if (MACHOPIC_INDIRECT
11abc112
MM
12106 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
12107 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
12108#endif
12109 assemble_name (file, name);
12110 }
85b776df 12111 else if (!DOT_SYMBOLS)
9739c90c 12112 assemble_name (file, XSTR (x, 0));
85b776df
AM
12113 else
12114 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
12115 return;
12116
9854d9ed
RK
12117 case 'Z':
12118 /* Like 'L', for last word of TImode. */
12119 if (GET_CODE (x) == REG)
fb5c67a7 12120 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
12121 else if (GET_CODE (x) == MEM)
12122 {
12123 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12124 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12125 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
12126 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12127 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 12128 else
d7624dc0 12129 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 12130 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12131 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12132 reg_names[SMALL_DATA_REG]);
9854d9ed 12133 }
5c23c401 12134 return;
0ac081f6 12135
a3170dc6 12136 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
12137 case 'y':
12138 {
12139 rtx tmp;
12140
37409796 12141 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
12142
12143 tmp = XEXP (x, 0);
12144
90d3ff1c 12145 /* Ugly hack because %y is overloaded. */
8ef65e3d 12146 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
12147 && (GET_MODE_SIZE (GET_MODE (x)) == 8
12148 || GET_MODE (x) == TFmode
12149 || GET_MODE (x) == TImode))
a3170dc6
AH
12150 {
12151 /* Handle [reg]. */
12152 if (GET_CODE (tmp) == REG)
12153 {
12154 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
12155 break;
12156 }
12157 /* Handle [reg+UIMM]. */
12158 else if (GET_CODE (tmp) == PLUS &&
12159 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
12160 {
12161 int x;
12162
37409796 12163 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
12164
12165 x = INTVAL (XEXP (tmp, 1));
12166 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
12167 break;
12168 }
12169
12170 /* Fall through. Must be [reg+reg]. */
12171 }
850e8d3d
DN
12172 if (TARGET_ALTIVEC
12173 && GET_CODE (tmp) == AND
12174 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
12175 && INTVAL (XEXP (tmp, 1)) == -16)
12176 tmp = XEXP (tmp, 0);
0ac081f6 12177 if (GET_CODE (tmp) == REG)
c62f2db5 12178 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 12179 else
0ac081f6 12180 {
37409796 12181 gcc_assert (GET_CODE (tmp) == PLUS
9024f4b8
AM
12182 && REG_P (XEXP (tmp, 0))
12183 && REG_P (XEXP (tmp, 1)));
bb8df8a6 12184
0ac081f6
AH
12185 if (REGNO (XEXP (tmp, 0)) == 0)
12186 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
12187 reg_names[ REGNO (XEXP (tmp, 0)) ]);
12188 else
12189 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
12190 reg_names[ REGNO (XEXP (tmp, 1)) ]);
12191 }
0ac081f6
AH
12192 break;
12193 }
f676971a 12194
9878760c
RK
12195 case 0:
12196 if (GET_CODE (x) == REG)
12197 fprintf (file, "%s", reg_names[REGNO (x)]);
12198 else if (GET_CODE (x) == MEM)
12199 {
12200 /* We need to handle PRE_INC and PRE_DEC here, since we need to
12201 know the width from the mode. */
12202 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
12203 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
12204 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 12205 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
12206 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
12207 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
12208 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12209 output_address (XEXP (XEXP (x, 0), 1));
9878760c 12210 else
a54d04b7 12211 output_address (XEXP (x, 0));
9878760c
RK
12212 }
12213 else
a54d04b7 12214 output_addr_const (file, x);
a85d226b 12215 return;
9878760c 12216
c4501e62
JJ
12217 case '&':
12218 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12219 return;
12220
9878760c
RK
12221 default:
12222 output_operand_lossage ("invalid %%xn code");
12223 }
12224}
12225\f
12226/* Print the address of an operand. */
12227
12228void
a2369ed3 12229print_operand_address (FILE *file, rtx x)
9878760c
RK
12230{
12231 if (GET_CODE (x) == REG)
4697a36c 12232 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
12233 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12234 || GET_CODE (x) == LABEL_REF)
9878760c
RK
12235 {
12236 output_addr_const (file, x);
ba5e43aa 12237 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12238 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12239 reg_names[SMALL_DATA_REG]);
37409796
NS
12240 else
12241 gcc_assert (!TARGET_TOC);
9878760c
RK
12242 }
12243 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12244 {
9024f4b8 12245 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 12246 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
12247 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12248 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 12249 else
4697a36c
MM
12250 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12251 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
12252 }
12253 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
12254 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
12255 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
12256#if TARGET_ELF
12257 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12258 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
12259 {
12260 output_addr_const (file, XEXP (x, 1));
12261 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12262 }
c859cda6
DJ
12263#endif
12264#if TARGET_MACHO
12265 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12266 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
12267 {
12268 fprintf (file, "lo16(");
12269 output_addr_const (file, XEXP (x, 1));
12270 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12271 }
3cb999d8 12272#endif
4d588c14 12273 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 12274 {
2bfcf297 12275 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 12276 {
2bfcf297
DB
12277 rtx contains_minus = XEXP (x, 1);
12278 rtx minus, symref;
12279 const char *name;
f676971a 12280
9ebbca7d 12281 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 12282 turn it into (sym) for output_addr_const. */
9ebbca7d
GK
12283 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
12284 contains_minus = XEXP (contains_minus, 0);
12285
2bfcf297
DB
12286 minus = XEXP (contains_minus, 0);
12287 symref = XEXP (minus, 0);
12288 XEXP (contains_minus, 0) = symref;
12289 if (TARGET_ELF)
12290 {
12291 char *newname;
12292
12293 name = XSTR (symref, 0);
12294 newname = alloca (strlen (name) + sizeof ("@toc"));
12295 strcpy (newname, name);
12296 strcat (newname, "@toc");
12297 XSTR (symref, 0) = newname;
12298 }
12299 output_addr_const (file, XEXP (x, 1));
12300 if (TARGET_ELF)
12301 XSTR (symref, 0) = name;
9ebbca7d
GK
12302 XEXP (contains_minus, 0) = minus;
12303 }
12304 else
12305 output_addr_const (file, XEXP (x, 1));
12306
12307 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
12308 }
9878760c 12309 else
37409796 12310 gcc_unreachable ();
9878760c
RK
12311}
12312\f
88cad84b 12313/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
12314 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
12315 is defined. It also needs to handle DI-mode objects on 64-bit
12316 targets. */
12317
12318static bool
a2369ed3 12319rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 12320{
f4f4921e 12321#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 12322 /* Special handling for SI values. */
84dcde01 12323 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 12324 {
301d03af 12325 static int recurse = 0;
f676971a 12326
301d03af
RS
12327 /* For -mrelocatable, we mark all addresses that need to be fixed up
12328 in the .fixup section. */
12329 if (TARGET_RELOCATABLE
d6b5193b
RS
12330 && in_section != toc_section
12331 && in_section != text_section
4325ca90 12332 && !unlikely_text_section_p (in_section)
301d03af
RS
12333 && !recurse
12334 && GET_CODE (x) != CONST_INT
12335 && GET_CODE (x) != CONST_DOUBLE
12336 && CONSTANT_P (x))
12337 {
12338 char buf[256];
12339
12340 recurse = 1;
12341 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12342 fixuplabelno++;
12343 ASM_OUTPUT_LABEL (asm_out_file, buf);
12344 fprintf (asm_out_file, "\t.long\t(");
12345 output_addr_const (asm_out_file, x);
12346 fprintf (asm_out_file, ")@fixup\n");
12347 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12348 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12349 fprintf (asm_out_file, "\t.long\t");
12350 assemble_name (asm_out_file, buf);
12351 fprintf (asm_out_file, "\n\t.previous\n");
12352 recurse = 0;
12353 return true;
12354 }
12355 /* Remove initial .'s to turn a -mcall-aixdesc function
12356 address into the address of the descriptor, not the function
12357 itself. */
12358 else if (GET_CODE (x) == SYMBOL_REF
12359 && XSTR (x, 0)[0] == '.'
12360 && DEFAULT_ABI == ABI_AIX)
12361 {
12362 const char *name = XSTR (x, 0);
12363 while (*name == '.')
12364 name++;
12365
12366 fprintf (asm_out_file, "\t.long\t%s\n", name);
12367 return true;
12368 }
12369 }
f4f4921e 12370#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
12371 return default_assemble_integer (x, size, aligned_p);
12372}
93638d7a
AM
12373
12374#ifdef HAVE_GAS_HIDDEN
12375/* Emit an assembler directive to set symbol visibility for DECL to
12376 VISIBILITY_TYPE. */
12377
5add3202 12378static void
a2369ed3 12379rs6000_assemble_visibility (tree decl, int vis)
93638d7a 12380{
93638d7a
AM
12381 /* Functions need to have their entry point symbol visibility set as
12382 well as their descriptor symbol visibility. */
85b776df
AM
12383 if (DEFAULT_ABI == ABI_AIX
12384 && DOT_SYMBOLS
12385 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 12386 {
25fdb4dc 12387 static const char * const visibility_types[] = {
c4ad648e 12388 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
12389 };
12390
12391 const char *name, *type;
93638d7a
AM
12392
12393 name = ((* targetm.strip_name_encoding)
12394 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 12395 type = visibility_types[vis];
93638d7a 12396
25fdb4dc
RH
12397 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12398 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 12399 }
25fdb4dc
RH
12400 else
12401 default_assemble_visibility (decl, vis);
93638d7a
AM
12402}
12403#endif
301d03af 12404\f
39a10a29 12405enum rtx_code
a2369ed3 12406rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
12407{
12408 /* Reversal of FP compares takes care -- an ordered compare
12409 becomes an unordered compare and vice versa. */
f676971a 12410 if (mode == CCFPmode
bc9ec0e0
GK
12411 && (!flag_finite_math_only
12412 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12413 || code == UNEQ || code == LTGT))
bab6226b 12414 return reverse_condition_maybe_unordered (code);
39a10a29 12415 else
bab6226b 12416 return reverse_condition (code);
39a10a29
GK
12417}
12418
39a10a29
GK
12419/* Generate a compare for CODE. Return a brand-new rtx that
12420 represents the result of the compare. */
a4f6c312 12421
39a10a29 12422static rtx
a2369ed3 12423rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
12424{
12425 enum machine_mode comp_mode;
12426 rtx compare_result;
12427
12428 if (rs6000_compare_fp_p)
12429 comp_mode = CCFPmode;
12430 else if (code == GTU || code == LTU
c4ad648e 12431 || code == GEU || code == LEU)
39a10a29 12432 comp_mode = CCUNSmode;
60934f9c
NS
12433 else if ((code == EQ || code == NE)
12434 && GET_CODE (rs6000_compare_op0) == SUBREG
12435 && GET_CODE (rs6000_compare_op1) == SUBREG
12436 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
12437 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
12438 /* These are unsigned values, perhaps there will be a later
12439 ordering compare that can be shared with this one.
12440 Unfortunately we cannot detect the signedness of the operands
12441 for non-subregs. */
12442 comp_mode = CCUNSmode;
39a10a29
GK
12443 else
12444 comp_mode = CCmode;
12445
12446 /* First, the compare. */
12447 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12448
cef6b86c 12449 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12450 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 12451 && rs6000_compare_fp_p)
a3170dc6 12452 {
64022b5d 12453 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
12454 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
12455
12456 if (op_mode == VOIDmode)
12457 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 12458
cef6b86c
EB
12459 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12460 This explains the following mess. */
423c1189 12461
a3170dc6
AH
12462 switch (code)
12463 {
423c1189 12464 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
12465 switch (op_mode)
12466 {
12467 case SFmode:
12468 cmp = flag_unsafe_math_optimizations
12469 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
12470 rs6000_compare_op1)
12471 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
12472 rs6000_compare_op1);
12473 break;
12474
12475 case DFmode:
12476 cmp = flag_unsafe_math_optimizations
12477 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
12478 rs6000_compare_op1)
12479 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
12480 rs6000_compare_op1);
12481 break;
12482
17caeff2
JM
12483 case TFmode:
12484 cmp = flag_unsafe_math_optimizations
12485 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
12486 rs6000_compare_op1)
12487 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
12488 rs6000_compare_op1);
12489 break;
12490
37409796
NS
12491 default:
12492 gcc_unreachable ();
12493 }
a3170dc6 12494 break;
bb8df8a6 12495
423c1189 12496 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
12497 switch (op_mode)
12498 {
12499 case SFmode:
12500 cmp = flag_unsafe_math_optimizations
12501 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
12502 rs6000_compare_op1)
12503 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
12504 rs6000_compare_op1);
12505 break;
bb8df8a6 12506
37409796
NS
12507 case DFmode:
12508 cmp = flag_unsafe_math_optimizations
12509 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
12510 rs6000_compare_op1)
12511 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
12512 rs6000_compare_op1);
12513 break;
12514
17caeff2
JM
12515 case TFmode:
12516 cmp = flag_unsafe_math_optimizations
12517 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
12518 rs6000_compare_op1)
12519 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
12520 rs6000_compare_op1);
12521 break;
12522
37409796
NS
12523 default:
12524 gcc_unreachable ();
12525 }
a3170dc6 12526 break;
bb8df8a6 12527
423c1189 12528 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
12529 switch (op_mode)
12530 {
12531 case SFmode:
12532 cmp = flag_unsafe_math_optimizations
12533 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
12534 rs6000_compare_op1)
12535 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
12536 rs6000_compare_op1);
12537 break;
bb8df8a6 12538
37409796
NS
12539 case DFmode:
12540 cmp = flag_unsafe_math_optimizations
12541 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
12542 rs6000_compare_op1)
12543 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
12544 rs6000_compare_op1);
12545 break;
12546
17caeff2
JM
12547 case TFmode:
12548 cmp = flag_unsafe_math_optimizations
12549 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
12550 rs6000_compare_op1)
12551 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
12552 rs6000_compare_op1);
12553 break;
12554
37409796
NS
12555 default:
12556 gcc_unreachable ();
12557 }
a3170dc6 12558 break;
4d4cbc0e 12559 default:
37409796 12560 gcc_unreachable ();
a3170dc6
AH
12561 }
12562
12563 /* Synthesize LE and GE from LT/GT || EQ. */
12564 if (code == LE || code == GE || code == LEU || code == GEU)
12565 {
a3170dc6
AH
12566 emit_insn (cmp);
12567
12568 switch (code)
12569 {
12570 case LE: code = LT; break;
12571 case GE: code = GT; break;
12572 case LEU: code = LT; break;
12573 case GEU: code = GT; break;
37409796 12574 default: gcc_unreachable ();
a3170dc6
AH
12575 }
12576
a3170dc6
AH
12577 compare_result2 = gen_reg_rtx (CCFPmode);
12578
12579 /* Do the EQ. */
37409796
NS
12580 switch (op_mode)
12581 {
12582 case SFmode:
12583 cmp = flag_unsafe_math_optimizations
12584 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
12585 rs6000_compare_op1)
12586 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
12587 rs6000_compare_op1);
12588 break;
12589
12590 case DFmode:
12591 cmp = flag_unsafe_math_optimizations
12592 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
12593 rs6000_compare_op1)
12594 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
12595 rs6000_compare_op1);
12596 break;
12597
17caeff2
JM
12598 case TFmode:
12599 cmp = flag_unsafe_math_optimizations
12600 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
12601 rs6000_compare_op1)
12602 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
12603 rs6000_compare_op1);
12604 break;
12605
37409796
NS
12606 default:
12607 gcc_unreachable ();
12608 }
a3170dc6
AH
12609 emit_insn (cmp);
12610
a3170dc6 12611 /* OR them together. */
64022b5d
AH
12612 or_result = gen_reg_rtx (CCFPmode);
12613 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12614 compare_result2);
a3170dc6
AH
12615 compare_result = or_result;
12616 code = EQ;
12617 }
12618 else
12619 {
a3170dc6 12620 if (code == NE || code == LTGT)
a3170dc6 12621 code = NE;
423c1189
AH
12622 else
12623 code = EQ;
a3170dc6
AH
12624 }
12625
12626 emit_insn (cmp);
12627 }
12628 else
de17c25f
DE
12629 {
12630 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12631 CLOBBERs to match cmptf_internal2 pattern. */
12632 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
12633 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 12634 && !TARGET_IEEEQUAD
de17c25f
DE
12635 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
12636 emit_insn (gen_rtx_PARALLEL (VOIDmode,
12637 gen_rtvec (9,
12638 gen_rtx_SET (VOIDmode,
12639 compare_result,
12640 gen_rtx_COMPARE (comp_mode,
12641 rs6000_compare_op0,
12642 rs6000_compare_op1)),
12643 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12644 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12645 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12646 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12647 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12648 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12649 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12650 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
12651 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
12652 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
12653 {
12654 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
12655 comp_mode = CCEQmode;
12656 compare_result = gen_reg_rtx (CCEQmode);
12657 if (TARGET_64BIT)
12658 emit_insn (gen_stack_protect_testdi (compare_result,
12659 rs6000_compare_op0, op1));
12660 else
12661 emit_insn (gen_stack_protect_testsi (compare_result,
12662 rs6000_compare_op0, op1));
12663 }
de17c25f
DE
12664 else
12665 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
12666 gen_rtx_COMPARE (comp_mode,
12667 rs6000_compare_op0,
12668 rs6000_compare_op1)));
12669 }
f676971a 12670
ca5adc63 12671 /* Some kinds of FP comparisons need an OR operation;
e7108df9 12672 under flag_finite_math_only we don't bother. */
39a10a29 12673 if (rs6000_compare_fp_p
e7108df9 12674 && !flag_finite_math_only
8ef65e3d 12675 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
12676 && (code == LE || code == GE
12677 || code == UNEQ || code == LTGT
12678 || code == UNGT || code == UNLT))
12679 {
12680 enum rtx_code or1, or2;
12681 rtx or1_rtx, or2_rtx, compare2_rtx;
12682 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 12683
39a10a29
GK
12684 switch (code)
12685 {
12686 case LE: or1 = LT; or2 = EQ; break;
12687 case GE: or1 = GT; or2 = EQ; break;
12688 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
12689 case LTGT: or1 = LT; or2 = GT; break;
12690 case UNGT: or1 = UNORDERED; or2 = GT; break;
12691 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 12692 default: gcc_unreachable ();
39a10a29
GK
12693 }
12694 validate_condition_mode (or1, comp_mode);
12695 validate_condition_mode (or2, comp_mode);
1c563bed
KH
12696 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
12697 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
12698 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
12699 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
12700 const_true_rtx);
12701 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
12702
12703 compare_result = or_result;
12704 code = EQ;
12705 }
12706
12707 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 12708
1c563bed 12709 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
12710}
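/* For example, a floating-point LE (with NaNs honored) is emitted as an
   fcmpu-style CCFP compare followed by a cror-style pattern that ORs the
   LT and EQ bits into a CCEQ register; the condition handed back to the
   caller is then a plain EQ test of that register.  */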
12711
12712
12713/* Emit the RTL for an sCOND pattern. */
12714
12715void
a2369ed3 12716rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
12717{
12718 rtx condition_rtx;
12719 enum machine_mode op_mode;
b7053a3f 12720 enum rtx_code cond_code;
39a10a29
GK
12721
12722 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
12723 cond_code = GET_CODE (condition_rtx);
12724
8ef65e3d 12725 if (rs6000_compare_fp_p
423c1189
AH
12726 && !TARGET_FPRS && TARGET_HARD_FLOAT)
12727 {
12728 rtx t;
12729
12730 PUT_MODE (condition_rtx, SImode);
12731 t = XEXP (condition_rtx, 0);
12732
37409796 12733 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
12734
12735 if (cond_code == NE)
64022b5d 12736 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 12737
64022b5d 12738 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
12739 return;
12740 }
12741
b7053a3f
GK
12742 if (cond_code == NE
12743 || cond_code == GE || cond_code == LE
12744 || cond_code == GEU || cond_code == LEU
12745 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
12746 {
12747 rtx not_result = gen_reg_rtx (CCEQmode);
12748 rtx not_op, rev_cond_rtx;
12749 enum machine_mode cc_mode;
f676971a 12750
b7053a3f
GK
12751 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
12752
1c563bed 12753 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 12754 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
12755 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
12756 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
12757 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
12758 }
39a10a29
GK
12759
12760 op_mode = GET_MODE (rs6000_compare_op0);
12761 if (op_mode == VOIDmode)
12762 op_mode = GET_MODE (rs6000_compare_op1);
12763
12764 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
12765 {
12766 PUT_MODE (condition_rtx, DImode);
12767 convert_move (result, condition_rtx, 0);
12768 }
12769 else
12770 {
12771 PUT_MODE (condition_rtx, SImode);
12772 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
12773 }
12774}
12775
39a10a29
GK
12776/* Emit a branch of kind CODE to location LOC. */
12777
12778void
a2369ed3 12779rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
12780{
12781 rtx condition_rtx, loc_ref;
12782
12783 condition_rtx = rs6000_generate_compare (code);
12784 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
12785 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
12786 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
12787 loc_ref, pc_rtx)));
12788}
12789
12a4e8c5
GK
12790/* Return the string to output a conditional branch to LABEL, which is
12791 the operand number of the label, or NULL if the branch is really a
f676971a 12792 conditional return.
12a4e8c5
GK
12793
12794 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
12795 condition code register and its mode specifies what kind of
12796 comparison we made.
12797
a0ab749a 12798 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
12799
12800 INSN is the insn. */
12801
12802char *
a2369ed3 12803output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
12804{
12805 static char string[64];
12806 enum rtx_code code = GET_CODE (op);
12807 rtx cc_reg = XEXP (op, 0);
12808 enum machine_mode mode = GET_MODE (cc_reg);
12809 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 12810 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
12811 int really_reversed = reversed ^ need_longbranch;
12812 char *s = string;
12813 const char *ccode;
12814 const char *pred;
12815 rtx note;
12816
39a10a29
GK
12817 validate_condition_mode (code, mode);
12818
12819 /* Work out which way this really branches. We could use
12820 reverse_condition_maybe_unordered here always but this
12821 makes the resulting assembler clearer. */
12a4e8c5 12822 if (really_reversed)
de40e1df
DJ
12823 {
12824 /* Reversal of FP compares takes care -- an ordered compare
12825 becomes an unordered compare and vice versa. */
12826 if (mode == CCFPmode)
12827 code = reverse_condition_maybe_unordered (code);
12828 else
12829 code = reverse_condition (code);
12830 }
12a4e8c5 12831
8ef65e3d 12832 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
12833 {
12834 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
12835 to the GT bit. */
37409796
NS
12836 switch (code)
12837 {
12838 case EQ:
12839 /* Opposite of GT. */
12840 code = GT;
12841 break;
12842
12843 case NE:
12844 code = UNLE;
12845 break;
12846
12847 default:
12848 gcc_unreachable ();
12849 }
a3170dc6
AH
12850 }
12851
39a10a29 12852 switch (code)
12a4e8c5
GK
12853 {
12854 /* Not all of these are actually distinct opcodes, but
12855 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
12856 case NE: case LTGT:
12857 ccode = "ne"; break;
12858 case EQ: case UNEQ:
12859 ccode = "eq"; break;
f676971a 12860 case GE: case GEU:
50a0b056 12861 ccode = "ge"; break;
f676971a 12862 case GT: case GTU: case UNGT:
50a0b056 12863 ccode = "gt"; break;
f676971a 12864 case LE: case LEU:
50a0b056 12865 ccode = "le"; break;
f676971a 12866 case LT: case LTU: case UNLT:
50a0b056 12867 ccode = "lt"; break;
12a4e8c5
GK
12868 case UNORDERED: ccode = "un"; break;
12869 case ORDERED: ccode = "nu"; break;
12870 case UNGE: ccode = "nl"; break;
12871 case UNLE: ccode = "ng"; break;
12872 default:
37409796 12873 gcc_unreachable ();
12a4e8c5 12874 }
f676971a
EC
12875
12876 /* Maybe we have a guess as to how likely the branch is.
94a54f47 12877 The old mnemonics don't have a way to specify this information. */
f4857b9b 12878 pred = "";
12a4e8c5
GK
12879 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
12880 if (note != NULL_RTX)
12881 {
12882 /* PROB is the difference from 50%. */
12883 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
12884
12885 /* Only hint for highly probable/improbable branches on newer
12886 cpus as static prediction overrides processor dynamic
12887 prediction. For older cpus we may as well always hint, but
12888 assume not taken for branches that are very close to 50% as a
12889 mispredicted taken branch is more expensive than a
f676971a 12890 mispredicted not-taken branch. */
ec507f2d 12891 if (rs6000_always_hint
2c9e13f3
JH
12892 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
12893 && br_prob_note_reliable_p (note)))
f4857b9b
AM
12894 {
12895 if (abs (prob) > REG_BR_PROB_BASE / 20
12896 && ((prob > 0) ^ need_longbranch))
c4ad648e 12897 pred = "+";
f4857b9b
AM
12898 else
12899 pred = "-";
12900 }
12a4e8c5 12901 }
12a4e8c5
GK
12902
12903 if (label == NULL)
94a54f47 12904 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 12905 else
94a54f47 12906 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 12907
37c67319 12908 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 12909 Assume they'd only be the first character.... */
37c67319
GK
12910 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
12911 *s++ = '%';
94a54f47 12912 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
12913
12914 if (label != NULL)
12915 {
12916 /* If the branch distance was too far, we may have to use an
12917 unconditional branch to go the distance. */
12918 if (need_longbranch)
44518ddd 12919 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
12920 else
12921 s += sprintf (s, ",%s", label);
12922 }
12923
12924 return string;
12925}
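/* For illustration, the template built here looks like
   "{beq|beq+} 0,%l0" for a short branch predicted taken, or
   "{bne|bne-} 0,$+8\n\tb %l0" once the distance forces the reversed long
   form; the CR field name ("0" in these sketches) comes from reg_names
   and may print differently on some configurations.  */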
50a0b056 12926
64022b5d 12927/* Return the string to flip the GT bit on a CR. */
423c1189 12928char *
64022b5d 12929output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
12930{
12931 static char string[64];
12932 int a, b;
12933
37409796
NS
12934 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
12935 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 12936
64022b5d
AH
12937 /* GT bit. */
12938 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
12939 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
12940
12941 sprintf (string, "crnot %d,%d", a, b);
12942 return string;
12943}
12944
21213b4c
DP
12945/* Return insn index for the vector compare instruction for the given CODE,
 12946 DEST_MODE and OP_MODE.  Return INSN_NOT_AVAILABLE if no valid insn is
 12947 available. */
12948
12949static int
94ff898d 12950get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
12951 enum machine_mode dest_mode,
12952 enum machine_mode op_mode)
12953{
12954 if (!TARGET_ALTIVEC)
12955 return INSN_NOT_AVAILABLE;
12956
12957 switch (code)
12958 {
12959 case EQ:
12960 if (dest_mode == V16QImode && op_mode == V16QImode)
12961 return UNSPEC_VCMPEQUB;
12962 if (dest_mode == V8HImode && op_mode == V8HImode)
12963 return UNSPEC_VCMPEQUH;
12964 if (dest_mode == V4SImode && op_mode == V4SImode)
12965 return UNSPEC_VCMPEQUW;
12966 if (dest_mode == V4SImode && op_mode == V4SFmode)
12967 return UNSPEC_VCMPEQFP;
12968 break;
12969 case GE:
12970 if (dest_mode == V4SImode && op_mode == V4SFmode)
12971 return UNSPEC_VCMPGEFP;
12972 case GT:
12973 if (dest_mode == V16QImode && op_mode == V16QImode)
12974 return UNSPEC_VCMPGTSB;
12975 if (dest_mode == V8HImode && op_mode == V8HImode)
12976 return UNSPEC_VCMPGTSH;
12977 if (dest_mode == V4SImode && op_mode == V4SImode)
12978 return UNSPEC_VCMPGTSW;
12979 if (dest_mode == V4SImode && op_mode == V4SFmode)
12980 return UNSPEC_VCMPGTFP;
12981 break;
12982 case GTU:
12983 if (dest_mode == V16QImode && op_mode == V16QImode)
12984 return UNSPEC_VCMPGTUB;
12985 if (dest_mode == V8HImode && op_mode == V8HImode)
12986 return UNSPEC_VCMPGTUH;
12987 if (dest_mode == V4SImode && op_mode == V4SImode)
12988 return UNSPEC_VCMPGTUW;
12989 break;
12990 default:
12991 break;
12992 }
12993 return INSN_NOT_AVAILABLE;
12994}
12995
12996/* Emit vector compare for operands OP0 and OP1 using code RCODE.
12997 DMODE is expected destination mode. This is a recursive function. */
12998
12999static rtx
13000rs6000_emit_vector_compare (enum rtx_code rcode,
13001 rtx op0, rtx op1,
13002 enum machine_mode dmode)
13003{
13004 int vec_cmp_insn;
13005 rtx mask;
13006 enum machine_mode dest_mode;
13007 enum machine_mode op_mode = GET_MODE (op1);
13008
37409796
NS
13009 gcc_assert (TARGET_ALTIVEC);
13010 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
13011
13012 /* Floating point vector compare instructions use destination V4SImode.
13013 Move destination to appropriate mode later. */
13014 if (dmode == V4SFmode)
13015 dest_mode = V4SImode;
13016 else
13017 dest_mode = dmode;
13018
13019 mask = gen_reg_rtx (dest_mode);
13020 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
13021
13022 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
13023 {
13024 bool swap_operands = false;
13025 bool try_again = false;
13026 switch (rcode)
13027 {
13028 case LT:
13029 rcode = GT;
13030 swap_operands = true;
13031 try_again = true;
13032 break;
13033 case LTU:
13034 rcode = GTU;
13035 swap_operands = true;
13036 try_again = true;
13037 break;
13038 case NE:
370df7db
JC
13039 case UNLE:
13040 case UNLT:
13041 case UNGE:
13042 case UNGT:
13043 /* Invert condition and try again.
13044 e.g., A != B becomes ~(A==B). */
21213b4c 13045 {
370df7db 13046 enum rtx_code rev_code;
21213b4c 13047 enum insn_code nor_code;
d1123cde 13048 rtx eq_rtx;
370df7db
JC
13049
13050 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
13051 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
13052 dest_mode);
94ff898d 13053
166cdb08 13054 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 13055 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
13056 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
13057
13058 if (dmode != dest_mode)
13059 {
13060 rtx temp = gen_reg_rtx (dest_mode);
13061 convert_move (temp, mask, 0);
13062 return temp;
13063 }
13064 return mask;
13065 }
13066 break;
13067 case GE:
13068 case GEU:
13069 case LE:
13070 case LEU:
13071 /* Try GT/GTU/LT/LTU OR EQ */
13072 {
13073 rtx c_rtx, eq_rtx;
13074 enum insn_code ior_code;
13075 enum rtx_code new_code;
13076
37409796
NS
13077 switch (rcode)
13078 {
13079 case GE:
13080 new_code = GT;
13081 break;
13082
13083 case GEU:
13084 new_code = GTU;
13085 break;
13086
13087 case LE:
13088 new_code = LT;
13089 break;
13090
13091 case LEU:
13092 new_code = LTU;
13093 break;
13094
13095 default:
13096 gcc_unreachable ();
13097 }
21213b4c
DP
13098
13099 c_rtx = rs6000_emit_vector_compare (new_code,
13100 op0, op1, dest_mode);
13101 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
13102 dest_mode);
13103
166cdb08 13104 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 13105 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
13106 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
13107 if (dmode != dest_mode)
13108 {
13109 rtx temp = gen_reg_rtx (dest_mode);
13110 convert_move (temp, mask, 0);
13111 return temp;
13112 }
13113 return mask;
13114 }
13115 break;
13116 default:
37409796 13117 gcc_unreachable ();
21213b4c
DP
13118 }
13119
13120 if (try_again)
13121 {
13122 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
13123 /* You only get two chances. */
13124 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
13125 }
13126
13127 if (swap_operands)
13128 {
13129 rtx tmp;
13130 tmp = op0;
13131 op0 = op1;
13132 op1 = tmp;
13133 }
13134 }
13135
915167f5
GK
13136 emit_insn (gen_rtx_SET (VOIDmode, mask,
13137 gen_rtx_UNSPEC (dest_mode,
13138 gen_rtvec (2, op0, op1),
13139 vec_cmp_insn)));
21213b4c
DP
13140 if (dmode != dest_mode)
13141 {
13142 rtx temp = gen_reg_rtx (dest_mode);
13143 convert_move (temp, mask, 0);
13144 return temp;
13145 }
13146 return mask;
13147}
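/* Recursion examples for the fallback cases above: LT is handled by
   swapping the operands and using GT; NE becomes the one's complement of
   EQ; and LE/GE (plus their unsigned forms) are built as LT-or-EQ /
   GT-or-EQ with the vector IOR pattern.  */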
13148
13149/* Return vector select instruction for MODE.  Return INSN_NOT_AVAILABLE
 13150 if no valid insn exists for the given mode. */
13151
13152static int
13153get_vsel_insn (enum machine_mode mode)
13154{
13155 switch (mode)
13156 {
13157 case V4SImode:
13158 return UNSPEC_VSEL4SI;
13159 break;
13160 case V4SFmode:
13161 return UNSPEC_VSEL4SF;
13162 break;
13163 case V8HImode:
13164 return UNSPEC_VSEL8HI;
13165 break;
13166 case V16QImode:
13167 return UNSPEC_VSEL16QI;
13168 break;
13169 default:
13170 return INSN_NOT_AVAILABLE;
13171 break;
13172 }
13173 return INSN_NOT_AVAILABLE;
13174}
13175
13176/* Emit vector select insn where DEST is destination using
13177 operands OP1, OP2 and MASK. */
13178
13179static void
13180rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
13181{
13182 rtx t, temp;
13183 enum machine_mode dest_mode = GET_MODE (dest);
13184 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
13185
13186 temp = gen_reg_rtx (dest_mode);
94ff898d 13187
bb8df8a6 13188 /* For each vector element, select op1 when mask is 1 otherwise
19f1ebc7 13189 select op2. */
915167f5
GK
13190 t = gen_rtx_SET (VOIDmode, temp,
13191 gen_rtx_UNSPEC (dest_mode,
13192 gen_rtvec (3, op2, op1, mask),
13193 vsel_insn_index));
21213b4c
DP
13194 emit_insn (t);
13195 emit_move_insn (dest, temp);
13196 return;
13197}
13198
94ff898d 13199/* Emit vector conditional expression.
21213b4c
DP
13200 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
13201 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
13202
13203int
13204rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
13205 rtx cond, rtx cc_op0, rtx cc_op1)
13206{
13207 enum machine_mode dest_mode = GET_MODE (dest);
13208 enum rtx_code rcode = GET_CODE (cond);
13209 rtx mask;
13210
13211 if (!TARGET_ALTIVEC)
13212 return 0;
13213
13214 /* Get the vector mask for the given relational operations. */
13215 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
13216
13217 rs6000_emit_vector_select (dest, op1, op2, mask);
13218
13219 return 1;
13220}
13221
50a0b056
GK
13222/* Emit a conditional move: move TRUE_COND to DEST if OP of the
13223 operands of the last comparison is nonzero/true, FALSE_COND if it
13224 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 13225
50a0b056 13226int
a2369ed3 13227rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
13228{
13229 enum rtx_code code = GET_CODE (op);
13230 rtx op0 = rs6000_compare_op0;
13231 rtx op1 = rs6000_compare_op1;
13232 REAL_VALUE_TYPE c1;
3148ad6d
DJ
13233 enum machine_mode compare_mode = GET_MODE (op0);
13234 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 13235 rtx temp;
add2402e 13236 bool is_against_zero;
50a0b056 13237
a3c9585f 13238 /* These modes should always match. */
a3170dc6
AH
13239 if (GET_MODE (op1) != compare_mode
13240 /* In the isel case however, we can use a compare immediate, so
13241 op1 may be a small constant. */
13242 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 13243 return 0;
178c3eff 13244 if (GET_MODE (true_cond) != result_mode)
3148ad6d 13245 return 0;
178c3eff 13246 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
13247 return 0;
13248
50a0b056 13249 /* First, work out if the hardware can do this at all, or
a3c9585f 13250 if it's too slow.... */
50a0b056 13251 if (! rs6000_compare_fp_p)
a3170dc6
AH
13252 {
13253 if (TARGET_ISEL)
13254 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13255 return 0;
13256 }
8ef65e3d 13257 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 13258 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 13259 return 0;
50a0b056 13260
add2402e 13261 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 13262
add2402e
GK
13263 /* A floating-point subtract might overflow, underflow, or produce
13264 an inexact result, thus changing the floating-point flags, so it
13265 can't be generated if we care about that. It's safe if one side
13266 of the construct is zero, since then no subtract will be
13267 generated. */
ebb109ad 13268 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
13269 && flag_trapping_math && ! is_against_zero)
13270 return 0;
13271
50a0b056
GK
13272 /* Eliminate half of the comparisons by switching operands, this
13273 makes the remaining code simpler. */
13274 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 13275 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
13276 {
13277 code = reverse_condition_maybe_unordered (code);
13278 temp = true_cond;
13279 true_cond = false_cond;
13280 false_cond = temp;
13281 }
13282
13283 /* UNEQ and LTGT take four instructions for a comparison with zero,
13284 it'll probably be faster to use a branch here too. */
bc9ec0e0 13285 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 13286 return 0;
f676971a 13287
50a0b056
GK
13288 if (GET_CODE (op1) == CONST_DOUBLE)
13289 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 13290
b6d08ca1 13291 /* We're going to try to implement comparisons by performing
50a0b056
GK
13292 a subtract, then comparing against zero. Unfortunately,
13293 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 13294 know that the operand is finite and the comparison
50a0b056 13295 would treat EQ differently from UNORDERED, we can't do it. */
bc9ec0e0 13296 if (HONOR_INFINITIES (compare_mode)
50a0b056 13297 && code != GT && code != UNGE
045572c7 13298 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
13299 /* Constructs of the form (a OP b ? a : b) are safe. */
13300 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 13301 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
13302 && ! rtx_equal_p (op1, true_cond))))
13303 return 0;
add2402e 13304
50a0b056
GK
13305 /* At this point we know we can use fsel. */
13306
13307 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
13308 if (! is_against_zero)
13309 {
13310 temp = gen_reg_rtx (compare_mode);
13311 emit_insn (gen_rtx_SET (VOIDmode, temp,
13312 gen_rtx_MINUS (compare_mode, op0, op1)));
13313 op0 = temp;
13314 op1 = CONST0_RTX (compare_mode);
13315 }
50a0b056
GK
13316
13317 /* If we don't care about NaNs we can reduce some of the comparisons
13318 down to faster ones. */
bc9ec0e0 13319 if (! HONOR_NANS (compare_mode))
50a0b056
GK
13320 switch (code)
13321 {
13322 case GT:
13323 code = LE;
13324 temp = true_cond;
13325 true_cond = false_cond;
13326 false_cond = temp;
13327 break;
13328 case UNGE:
13329 code = GE;
13330 break;
13331 case UNEQ:
13332 code = EQ;
13333 break;
13334 default:
13335 break;
13336 }
13337
13338 /* Now, reduce everything down to a GE. */
13339 switch (code)
13340 {
13341 case GE:
13342 break;
13343
13344 case LE:
3148ad6d
DJ
13345 temp = gen_reg_rtx (compare_mode);
13346 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13347 op0 = temp;
13348 break;
13349
13350 case ORDERED:
3148ad6d
DJ
13351 temp = gen_reg_rtx (compare_mode);
13352 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
13353 op0 = temp;
13354 break;
13355
13356 case EQ:
3148ad6d 13357 temp = gen_reg_rtx (compare_mode);
f676971a 13358 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
13359 gen_rtx_NEG (compare_mode,
13360 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
13361 op0 = temp;
13362 break;
13363
13364 case UNGE:
bc9ec0e0 13365 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 13366 temp = gen_reg_rtx (result_mode);
50a0b056 13367 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 13368 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13369 gen_rtx_GE (VOIDmode,
13370 op0, op1),
13371 true_cond, false_cond)));
bc9ec0e0
GK
13372 false_cond = true_cond;
13373 true_cond = temp;
50a0b056 13374
3148ad6d
DJ
13375 temp = gen_reg_rtx (compare_mode);
13376 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13377 op0 = temp;
13378 break;
13379
13380 case GT:
bc9ec0e0 13381 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 13382 temp = gen_reg_rtx (result_mode);
50a0b056 13383 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 13384 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13385 gen_rtx_GE (VOIDmode,
13386 op0, op1),
13387 true_cond, false_cond)));
bc9ec0e0
GK
13388 true_cond = false_cond;
13389 false_cond = temp;
50a0b056 13390
3148ad6d
DJ
13391 temp = gen_reg_rtx (compare_mode);
13392 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13393 op0 = temp;
13394 break;
13395
13396 default:
37409796 13397 gcc_unreachable ();
50a0b056
GK
13398 }
13399
13400 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 13401 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13402 gen_rtx_GE (VOIDmode,
13403 op0, op1),
13404 true_cond, false_cond)));
13405 return 1;
13406}
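/* A rough sketch of the result of the fsel lowering above (illustrative,
   not the literal insn stream): once the safety checks pass, a
   double-precision "x = (a >= b) ? c : d" reduces to a subtract plus
   one fsel,

       fsub  ftmp, fa, fb          ftmp = a - b
       fsel  fx,   ftmp, fc, fd    fx = (ftmp >= 0.0) ? c : d

   which is why every comparison is first rewritten as a GE against
   zero.  */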
13407
a3170dc6
AH
13408/* Same as above, but for ints (isel). */
13409
13410static int
a2369ed3 13411rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
13412{
13413 rtx condition_rtx, cr;
13414
13415 /* All isel implementations thus far are 32-bits. */
13416 if (GET_MODE (rs6000_compare_op0) != SImode)
13417 return 0;
13418
13419 /* We still have to do the compare, because isel doesn't do a
13420 compare; it just looks at the CRx bits set by a previous compare
13421 instruction. */
13422 condition_rtx = rs6000_generate_compare (GET_CODE (op));
13423 cr = XEXP (condition_rtx, 0);
13424
13425 if (GET_MODE (cr) == CCmode)
13426 emit_insn (gen_isel_signed (dest, condition_rtx,
13427 true_cond, false_cond, cr));
13428 else
13429 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13430 true_cond, false_cond, cr));
13431
13432 return 1;
13433}
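/* Illustrative sketch, not the exact insn stream: for 32-bit
   "x = (a < b) ? c : d" the expansion is a compare that sets a CR
   field followed by a single isel,

       cmpw  cr0, ra, rb
       isel  rx, rc, rd, lt        rx = cr0.lt ? c : d

   so no conditional branch is needed.  */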
13434
13435const char *
a2369ed3 13436output_isel (rtx *operands)
a3170dc6
AH
13437{
13438 enum rtx_code code;
13439
13440 code = GET_CODE (operands[1]);
13441 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13442 {
13443 PUT_CODE (operands[1], reverse_condition (code));
13444 return "isel %0,%3,%2,%j1";
13445 }
13446 else
13447 return "isel %0,%2,%3,%j1";
13448}
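/* isel can only test a CR bit that is set (LT, GT, EQ, SO), so
   conditions such as GE have no bit of their own.  The reversal above
   therefore prints, for example, "x = (a >= b) ? c : d" roughly as

       isel  rx, rd, rc, lt        pick d when LT, otherwise c

   i.e. the condition is inverted and the two data operands are
   swapped, which computes the same result.  */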
13449
50a0b056 13450void
a2369ed3 13451rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13452{
13453 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13454 enum rtx_code c;
50a0b056 13455 rtx target;
5dc8d536
AH
13456
13457 if (code == SMAX || code == SMIN)
13458 c = GE;
13459 else
13460 c = GEU;
13461
50a0b056 13462 if (code == SMAX || code == UMAX)
f676971a 13463 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13464 op0, op1, mode, 0);
13465 else
f676971a 13466 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13467 op1, op0, mode, 0);
37409796 13468 gcc_assert (target);
50a0b056
GK
13469 if (target != dest)
13470 emit_move_insn (dest, target);
13471}
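/* In other words, the min/max is emitted as a plain conditional move,
   schematically

       dest = (op0 >=  op1) ? op0 : op1     SMAX
       dest = (op0 >=u op1) ? op0 : op1     UMAX
       dest = (op0 >=  op1) ? op1 : op0     SMIN (UMIN with >=u)

   so on targets with fsel or isel no branch is generated.  */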
46c07df8 13472
915167f5
GK
13473/* Emit instructions to perform a load-reserved/store-conditional operation.
13474 The operation performed is an atomic
13475 (set M (CODE:MODE M OP))
13476 If not NULL, BEFORE is atomically set to M before the operation, and
13477 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13478 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13479 Either OP or M may be wrapped in a NOT operation. */
13480
13481void
13482rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13483 rtx m, rtx op, rtx before_param, rtx after_param,
13484 bool sync_p)
13485{
13486 enum machine_mode used_mode;
13487 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13488 rtx used_m;
13489 rtvec vec;
13490 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13491 rtx shift = NULL_RTX;
bb8df8a6 13492
915167f5
GK
13493 if (sync_p)
13494 emit_insn (gen_memory_barrier ());
bb8df8a6 13495
915167f5
GK
13496 if (GET_CODE (m) == NOT)
13497 used_m = XEXP (m, 0);
13498 else
13499 used_m = m;
13500
13501 /* If this is smaller than SImode, we'll have to use SImode with
13502 adjustments. */
13503 if (mode == QImode || mode == HImode)
13504 {
13505 rtx newop, oldop;
13506
13507 if (MEM_ALIGN (used_m) >= 32)
13508 {
13509 int ishift = 0;
13510 if (BYTES_BIG_ENDIAN)
13511 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13512
915167f5 13513 shift = GEN_INT (ishift);
c75c6d11 13514 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13515 }
13516 else
13517 {
13518 rtx addrSI, aligned_addr;
a9c9d3fa 13519 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13520
c75c6d11
JJ
13521 addrSI = gen_lowpart_common (SImode,
13522 force_reg (Pmode, XEXP (used_m, 0)));
13523 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13524 shift = gen_reg_rtx (SImode);
13525
13526 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13527 GEN_INT (shift_mask)));
13528 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
13529
13530 aligned_addr = expand_binop (Pmode, and_optab,
13531 XEXP (used_m, 0),
13532 GEN_INT (-4), NULL_RTX,
13533 1, OPTAB_LIB_WIDEN);
13534 used_m = change_address (used_m, SImode, aligned_addr);
13535 set_mem_align (used_m, 32);
915167f5 13536 }
c75c6d11
JJ
13537 /* It's safe to keep the old alias set of USED_M, because
13538 the operation is atomic and only affects the original
13539 USED_M. */
13540 if (GET_CODE (m) == NOT)
13541 m = gen_rtx_NOT (SImode, used_m);
13542 else
13543 m = used_m;
915167f5
GK
13544
13545 if (GET_CODE (op) == NOT)
13546 {
13547 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13548 oldop = gen_rtx_NOT (SImode, oldop);
13549 }
13550 else
13551 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13552
915167f5
GK
13553 switch (code)
13554 {
13555 case IOR:
13556 case XOR:
13557 newop = expand_binop (SImode, and_optab,
13558 oldop, GEN_INT (imask), NULL_RTX,
13559 1, OPTAB_LIB_WIDEN);
13560 emit_insn (gen_ashlsi3 (newop, newop, shift));
13561 break;
13562
13563 case AND:
13564 newop = expand_binop (SImode, ior_optab,
13565 oldop, GEN_INT (~imask), NULL_RTX,
13566 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13567 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13568 break;
13569
13570 case PLUS:
9f0076e5 13571 case MINUS:
915167f5
GK
13572 {
13573 rtx mask;
bb8df8a6 13574
915167f5
GK
13575 newop = expand_binop (SImode, and_optab,
13576 oldop, GEN_INT (imask), NULL_RTX,
13577 1, OPTAB_LIB_WIDEN);
13578 emit_insn (gen_ashlsi3 (newop, newop, shift));
13579
13580 mask = gen_reg_rtx (SImode);
13581 emit_move_insn (mask, GEN_INT (imask));
13582 emit_insn (gen_ashlsi3 (mask, mask, shift));
13583
9f0076e5
DE
13584 if (code == PLUS)
13585 newop = gen_rtx_PLUS (SImode, m, newop);
13586 else
13587 newop = gen_rtx_MINUS (SImode, m, newop);
13588 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13589 newop = gen_rtx_IOR (SImode, newop,
13590 gen_rtx_AND (SImode,
13591 gen_rtx_NOT (SImode, mask),
13592 m));
13593 break;
13594 }
13595
13596 default:
13597 gcc_unreachable ();
13598 }
13599
a9c9d3fa
GK
13600 if (GET_CODE (m) == NOT)
13601 {
13602 rtx mask, xorm;
13603
13604 mask = gen_reg_rtx (SImode);
13605 emit_move_insn (mask, GEN_INT (imask));
13606 emit_insn (gen_ashlsi3 (mask, mask, shift));
13607
13608 xorm = gen_rtx_XOR (SImode, used_m, mask);
13609 /* Depending on the value of 'op', the XOR or the operation may
13610 be simplified away. */
13611 newop = simplify_gen_binary (code, SImode, xorm, newop);
13612 }
915167f5
GK
13613 op = newop;
13614 used_mode = SImode;
13615 before = gen_reg_rtx (used_mode);
13616 after = gen_reg_rtx (used_mode);
13617 }
13618 else
13619 {
13620 used_mode = mode;
13621 before = before_param;
13622 after = after_param;
13623
13624 if (before == NULL_RTX)
13625 before = gen_reg_rtx (used_mode);
13626 if (after == NULL_RTX)
13627 after = gen_reg_rtx (used_mode);
13628 }
bb8df8a6 13629
9f0076e5
DE
13630 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
13631 && used_mode != mode)
915167f5
GK
13632 the_op = op; /* Computed above. */
13633 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13634 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
13635 else
13636 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13637
13638 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13639 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13640 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
13641 gen_rtx_UNSPEC (used_mode,
13642 gen_rtvec (1, the_op),
13643 UNSPEC_SYNC_OP));
915167f5
GK
13644 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
13645
9f0076e5 13646 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
13647 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
13648 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
13649 else
13650 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
13651 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
13652
13653 /* Shift and mask the return values properly. */
13654 if (used_mode != mode && before_param)
13655 {
13656 emit_insn (gen_lshrsi3 (before, before, shift));
13657 convert_move (before_param, before, 1);
13658 }
13659
13660 if (used_mode != mode && after_param)
13661 {
13662 emit_insn (gen_lshrsi3 (after, after, shift));
13663 convert_move (after_param, after, 1);
13664 }
13665
13666 /* The previous sequence will end with a branch that's dependent on
13667 the conditional store, so placing an isync will ensure that no
13668 other instructions (especially, no load or store instructions)
13669 can start before the atomic operation completes. */
13670 if (sync_p)
13671 emit_insn (gen_isync ());
13672}
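/* A rough sketch of the subword (QImode/HImode) strategy above,
   illustrative rather than the exact insn stream: the address is
   aligned down to a full word and the operand and mask are moved into
   that byte's lane,

       aligned = addr & -4
       shift   = ((addr << 3) & 0x18) ^ 0x18    (mask 0x10 for HImode)
       newop   = (op & 0xff) << shift
       mask    = 0xff << shift

   the atomic read-modify-write is then done on the whole SImode word
   (the UNSPEC_SYNC_OP insn is expanded into the real lwarx/stwcx. loop
   by the later machine-description splitters), and BEFORE/AFTER are
   shifted back down.  SYNC_P adds a sync before and an isync after the
   sequence.  */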
13673
b52110d4
DE
13674/* A subroutine of the atomic operation splitters. Jump to LABEL if
13675 COND is true. Mark the jump as unlikely to be taken. */
13676
13677static void
13678emit_unlikely_jump (rtx cond, rtx label)
13679{
13680 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
13681 rtx x;
13682
13683 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
13684 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
13685 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
13686}
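/* REG_BR_PROB_BASE is 10000, so the probability attached here is
   10000 / 100 - 1 = 99, i.e. the jump (typically the branch back to a
   lwarx/stwcx. retry label) is predicted taken only about 1% of the
   time.  */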
13687
13688/* A subroutine of the atomic operation splitters. Emit a load-locked
13689 instruction in MODE. */
13690
13691static void
13692emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
13693{
13694 rtx (*fn) (rtx, rtx) = NULL;
13695 if (mode == SImode)
13696 fn = gen_load_locked_si;
13697 else if (mode == DImode)
13698 fn = gen_load_locked_di;
13699 emit_insn (fn (reg, mem));
13700}
13701
13702/* A subroutine of the atomic operation splitters. Emit a store-conditional
13703 instruction in MODE. */
13704
13705static void
13706emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
13707{
13708 rtx (*fn) (rtx, rtx, rtx) = NULL;
13709 if (mode == SImode)
13710 fn = gen_store_conditional_si;
13711 else if (mode == DImode)
13712 fn = gen_store_conditional_di;
13713
9f0076e5 13714 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
13715 if (PPC405_ERRATUM77)
13716 emit_insn (gen_memory_barrier ());
13717
13718 emit_insn (fn (res, mem, val));
13719}
13720
ea2c620c 13721/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 13722 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
13723 operand of the binary operator. BEFORE and AFTER are optional locations to
13724 return the value of MEM either before or after the operation. SCRATCH is
13725 a scratch register. */
13726
13727void
13728rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
13729 rtx before, rtx after, rtx scratch)
13730{
13731 enum machine_mode mode = GET_MODE (mem);
13732 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13733
13734 emit_insn (gen_memory_barrier ());
13735
13736 label = gen_label_rtx ();
13737 emit_label (label);
13738 label = gen_rtx_LABEL_REF (VOIDmode, label);
13739
13740 if (before == NULL_RTX)
13741 before = scratch;
13742 emit_load_locked (mode, before, mem);
13743
13744 if (code == NOT)
13745 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
13746 else if (code == AND)
13747 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
13748 else
13749 x = gen_rtx_fmt_ee (code, mode, before, val);
13750
13751 if (after != NULL_RTX)
13752 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
13753 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
13754
13755 emit_store_conditional (mode, cond, mem, scratch);
13756
13757 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13758 emit_unlikely_jump (x, label);
13759
13760 emit_insn (gen_isync ());
13761}
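/* Schematically, the SImode sequence emitted above is

       sync
    1: lwarx   rTMP, 0, rMEM
       <code>  rSCRATCH, rTMP, rVAL
       stwcx.  rSCRATCH, 0, rMEM
       bne-    1b
       isync

   with rTMP copied to BEFORE and the operation result to AFTER when
   those are requested; DImode uses ldarx/stdcx. instead.  (A sketch,
   not the literal assembler output.)  */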
13762
b52110d4
DE
13763/* Expand an atomic compare and swap operation. MEM is the memory on which
13764 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
13765 value to be stored. SCRATCH is a scratch GPR. */
13766
13767void
13768rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
13769 rtx scratch)
13770{
13771 enum machine_mode mode = GET_MODE (mem);
13772 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13773
13774 emit_insn (gen_memory_barrier ());
13775
13776 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13777 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13778 emit_label (XEXP (label1, 0));
13779
13780 emit_load_locked (mode, retval, mem);
13781
13782 x = gen_rtx_COMPARE (CCmode, retval, oldval);
13783 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13784
13785 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13786 emit_unlikely_jump (x, label2);
13787
13788 emit_move_insn (scratch, newval);
13789 emit_store_conditional (mode, cond, mem, scratch);
13790
13791 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13792 emit_unlikely_jump (x, label1);
13793
13794 emit_insn (gen_isync ());
13795 emit_label (XEXP (label2, 0));
13796}
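/* Schematically (SImode; a sketch, not the literal output):

       sync
    1: lwarx   rRETVAL, 0, rMEM
       cmpw    cr0, rRETVAL, rOLDVAL
       bne-    cr0, 2f
       mr      rSCRATCH, rNEWVAL
       stwcx.  rSCRATCH, 0, rMEM
       bne-    cr0, 1b
       isync
    2:

   RETVAL therefore always ends up holding the value found in MEM,
   whether or not the exchange succeeded.  */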
13797
13798/* Expand an atomic test and set operation. MEM is the memory on which
13799 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
13800
13801void
13802rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
13803{
13804 enum machine_mode mode = GET_MODE (mem);
13805 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13806
13807 emit_insn (gen_memory_barrier ());
13808
13809 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13810 emit_label (XEXP (label, 0));
13811
13812 emit_load_locked (mode, retval, mem);
13813 emit_move_insn (scratch, val);
13814 emit_store_conditional (mode, cond, mem, scratch);
13815
13816 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13817 emit_unlikely_jump (x, label);
13818
13819 emit_insn (gen_isync ());
13820}
13821
9fc75b97
DE
13822void
13823rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
13824{
13825 enum machine_mode mode = GET_MODE (mem);
13826 rtx addrSI, align, wdst, shift, mask;
13827 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
13828 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13829
13830 /* Shift amount for subword relative to aligned word. */
13831 addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
13832 shift = gen_reg_rtx (SImode);
13833 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
13834 GEN_INT (shift_mask)));
13835 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
13836
13837 /* Shift and mask old value into position within word. */
13838 oldval = convert_modes (SImode, mode, oldval, 1);
13839 oldval = expand_binop (SImode, and_optab,
13840 oldval, GEN_INT (imask), NULL_RTX,
13841 1, OPTAB_LIB_WIDEN);
13842 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
13843
13844 /* Shift and mask new value into position within word. */
13845 newval = convert_modes (SImode, mode, newval, 1);
13846 newval = expand_binop (SImode, and_optab,
13847 newval, GEN_INT (imask), NULL_RTX,
13848 1, OPTAB_LIB_WIDEN);
13849 emit_insn (gen_ashlsi3 (newval, newval, shift));
13850
13851 /* Mask for insertion. */
13852 mask = gen_reg_rtx (SImode);
13853 emit_move_insn (mask, GEN_INT (imask));
13854 emit_insn (gen_ashlsi3 (mask, mask, shift));
13855
13856 /* Address of aligned word containing subword. */
13857 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
13858 NULL_RTX, 1, OPTAB_LIB_WIDEN);
13859 mem = change_address (mem, SImode, align);
13860 set_mem_align (mem, 32);
13861 MEM_VOLATILE_P (mem) = 1;
13862
13863 wdst = gen_reg_rtx (SImode);
13864 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
13865 oldval, newval, mem));
13866
13867 emit_move_insn (dst, gen_lowpart (mode, wdst));
13868}
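/* Worked instance of the lane computation above (big-endian,
   illustrative only): a QImode value whose address ends in 0b01 gets

       shift = ((1 << 3) & 0x18) ^ 0x18 = 16

   so OLDVAL, NEWVAL and the 0xff mask are all placed in bits 16..23 of
   the aligned word handed to the sync_compare_and_swapqhi pattern;
   HImode uses a 0x10 shift mask and a 0xffff value mask.  */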
13869
13870void
13871rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
13872 rtx oldval, rtx newval, rtx mem,
13873 rtx scratch)
13874{
13875 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13876
13877 emit_insn (gen_memory_barrier ());
13878 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13879 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13880 emit_label (XEXP (label1, 0));
13881
13882 emit_load_locked (SImode, scratch, mem);
13883
13884 /* Mask subword within loaded value for comparison with oldval.
13885 Use UNSPEC_AND to avoid clobber. */
13886 emit_insn (gen_rtx_SET (SImode, dest,
13887 gen_rtx_UNSPEC (SImode,
13888 gen_rtvec (2, scratch, mask),
13889 UNSPEC_AND)));
13890
13891 x = gen_rtx_COMPARE (CCmode, dest, oldval);
13892 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13893
13894 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13895 emit_unlikely_jump (x, label2);
13896
13897 /* Clear subword within loaded value for insertion of new value. */
13898 emit_insn (gen_rtx_SET (SImode, scratch,
13899 gen_rtx_AND (SImode,
13900 gen_rtx_NOT (SImode, mask), scratch)));
13901 emit_insn (gen_iorsi3 (scratch, scratch, newval));
13902 emit_store_conditional (SImode, cond, mem, scratch);
13903
13904 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13905 emit_unlikely_jump (x, label1);
13906
13907 emit_insn (gen_isync ());
13908 emit_label (XEXP (label2, 0));
13909}
13910
13911
b52110d4 13912 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
13913 multi-register moves. It will emit at most one instruction for
13914 each register that is accessed; that is, it won't emit li/lis pairs
13915 (or equivalent for 64-bit code). One of SRC or DST must be a hard
13916 register. */
46c07df8 13917
46c07df8 13918void
a9baceb1 13919rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 13920{
a9baceb1
GK
13921 /* The register number of the first register being moved. */
13922 int reg;
13923 /* The mode that is to be moved. */
13924 enum machine_mode mode;
13925 /* The mode that the move is being done in, and its size. */
13926 enum machine_mode reg_mode;
13927 int reg_mode_size;
13928 /* The number of registers that will be moved. */
13929 int nregs;
13930
13931 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
13932 mode = GET_MODE (dst);
c8b622ff 13933 nregs = hard_regno_nregs[reg][mode];
a9baceb1 13934 if (FP_REGNO_P (reg))
7393f7f8 13935 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13936 else if (ALTIVEC_REGNO_P (reg))
13937 reg_mode = V16QImode;
4d4447b5
PB
13938 else if (TARGET_E500_DOUBLE && (mode == TFmode || mode == TDmode))
13939 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13940 else
13941 reg_mode = word_mode;
13942 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 13943
37409796 13944 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 13945
a9baceb1
GK
13946 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
13947 {
13948 /* Move register range backwards, if we might have destructive
13949 overlap. */
13950 int i;
13951 for (i = nregs - 1; i >= 0; i--)
f676971a 13952 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
13953 simplify_gen_subreg (reg_mode, dst, mode,
13954 i * reg_mode_size),
13955 simplify_gen_subreg (reg_mode, src, mode,
13956 i * reg_mode_size)));
13957 }
46c07df8
HP
13958 else
13959 {
a9baceb1
GK
13960 int i;
13961 int j = -1;
13962 bool used_update = false;
46c07df8 13963
c1e55850 13964 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
13965 {
13966 rtx breg;
3a1f863f 13967
a9baceb1
GK
13968 if (GET_CODE (XEXP (src, 0)) == PRE_INC
13969 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
13970 {
13971 rtx delta_rtx;
a9baceb1 13972 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
13973 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
13974 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
13975 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
13976 emit_insn (TARGET_32BIT
13977 ? gen_addsi3 (breg, breg, delta_rtx)
13978 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13979 src = replace_equiv_address (src, breg);
3a1f863f 13980 }
d04b6e6e 13981 else if (! rs6000_offsettable_memref_p (src))
c1e55850 13982 {
13e2e16e 13983 rtx basereg;
c1e55850
GK
13984 basereg = gen_rtx_REG (Pmode, reg);
13985 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 13986 src = replace_equiv_address (src, basereg);
c1e55850 13987 }
3a1f863f 13988
0423421f
AM
13989 breg = XEXP (src, 0);
13990 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
13991 breg = XEXP (breg, 0);
13992
13993 /* If the base register we are using to address memory is
13994 also a destination reg, then change that register last. */
13995 if (REG_P (breg)
13996 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
13997 && REGNO (breg) < REGNO (dst) + nregs)
13998 j = REGNO (breg) - REGNO (dst);
c4ad648e 13999 }
46c07df8 14000
a9baceb1 14001 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
14002 {
14003 rtx breg;
14004
a9baceb1
GK
14005 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
14006 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
14007 {
14008 rtx delta_rtx;
a9baceb1 14009 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
14010 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
14011 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
14012 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
14013
14014 /* We have to update the breg before doing the store.
14015 Use store with update, if available. */
14016
14017 if (TARGET_UPDATE)
14018 {
a9baceb1 14019 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
14020 emit_insn (TARGET_32BIT
14021 ? (TARGET_POWERPC64
14022 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
14023 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
14024 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 14025 used_update = true;
3a1f863f
DE
14026 }
14027 else
a9baceb1
GK
14028 emit_insn (TARGET_32BIT
14029 ? gen_addsi3 (breg, breg, delta_rtx)
14030 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14031 dst = replace_equiv_address (dst, breg);
3a1f863f 14032 }
37409796 14033 else
d04b6e6e 14034 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
14035 }
14036
46c07df8 14037 for (i = 0; i < nregs; i++)
f676971a 14038 {
3a1f863f
DE
14039 /* Calculate index to next subword. */
14040 ++j;
f676971a 14041 if (j == nregs)
3a1f863f 14042 j = 0;
46c07df8 14043
112cdef5 14044 /* If the compiler already emitted the move of the first word by
a9baceb1 14045 store with update, there is no need to do anything. */
3a1f863f 14046 if (j == 0 && used_update)
a9baceb1 14047 continue;
f676971a 14048
a9baceb1
GK
14049 emit_insn (gen_rtx_SET (VOIDmode,
14050 simplify_gen_subreg (reg_mode, dst, mode,
14051 j * reg_mode_size),
14052 simplify_gen_subreg (reg_mode, src, mode,
14053 j * reg_mode_size)));
3a1f863f 14054 }
46c07df8
HP
14055 }
14056}
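/* For example, a TImode move between general registers on a 32-bit
   target is split here into four SImode subreg moves.  When the
   destination range overlaps the source from above, the first loop
   copies the highest-numbered register first, so no source register is
   overwritten before it has been read; the MEM cases instead rotate
   the copy order (index J) so that a base register which is also a
   destination is moved last.  */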
14057
12a4e8c5 14058\f
a4f6c312
SS
14059/* This page contains routines that are used to determine what the
14060 function prologue and epilogue code will do and write them out. */
9878760c 14061
a4f6c312
SS
14062/* Return the first fixed-point register that is required to be
14063 saved. 32 if none. */
9878760c
RK
14064
14065int
863d938c 14066first_reg_to_save (void)
9878760c
RK
14067{
14068 int first_reg;
14069
14070 /* Find lowest numbered live register. */
14071 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 14072 if (df_regs_ever_live_p (first_reg)
a38d360d 14073 && (! call_used_regs[first_reg]
1db02437 14074 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14075 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
14076 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
14077 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
14078 break;
14079
ee890fe2 14080#if TARGET_MACHO
93638d7a
AM
14081 if (flag_pic
14082 && current_function_uses_pic_offset_table
14083 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 14084 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
14085#endif
14086
9878760c
RK
14087 return first_reg;
14088}
14089
14090/* Similar, for FP regs. */
14091
14092int
863d938c 14093first_fp_reg_to_save (void)
9878760c
RK
14094{
14095 int first_reg;
14096
14097 /* Find lowest numbered live register. */
14098 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 14099 if (df_regs_ever_live_p (first_reg))
9878760c
RK
14100 break;
14101
14102 return first_reg;
14103}
00b960c7
AH
14104
14105/* Similar, for AltiVec regs. */
14106
14107static int
863d938c 14108first_altivec_reg_to_save (void)
00b960c7
AH
14109{
14110 int i;
14111
14112 /* Stack frame remains as is unless we are in AltiVec ABI. */
14113 if (! TARGET_ALTIVEC_ABI)
14114 return LAST_ALTIVEC_REGNO + 1;
14115
22fa69da 14116 /* On Darwin, the unwind routines are compiled without
982afe02 14117 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
14118 altivec registers when necessary. */
14119 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
14120 && ! TARGET_ALTIVEC)
14121 return FIRST_ALTIVEC_REGNO + 20;
14122
00b960c7
AH
14123 /* Find lowest numbered live register. */
14124 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14125 if (df_regs_ever_live_p (i))
00b960c7
AH
14126 break;
14127
14128 return i;
14129}
14130
14131/* Return a 32-bit mask of the AltiVec registers we need to set in
14132 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
14133 the 32-bit word is 0. */
14134
14135static unsigned int
863d938c 14136compute_vrsave_mask (void)
00b960c7
AH
14137{
14138 unsigned int i, mask = 0;
14139
22fa69da 14140 /* On Darwin, the unwind routines are compiled without
982afe02 14141 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
14142 call-saved altivec registers when necessary. */
14143 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
14144 && ! TARGET_ALTIVEC)
14145 mask |= 0xFFF;
14146
00b960c7
AH
14147 /* First, find out if we use _any_ altivec registers. */
14148 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14149 if (df_regs_ever_live_p (i))
00b960c7
AH
14150 mask |= ALTIVEC_REG_BIT (i);
14151
14152 if (mask == 0)
14153 return mask;
14154
00b960c7
AH
14155 /* Next, remove the argument registers from the set. These must
14156 be in the VRSAVE mask set by the caller, so we don't need to add
14157 them in again. More importantly, the mask we compute here is
14158 used to generate CLOBBERs in the set_vrsave insn, and we do not
14159 wish the argument registers to die. */
a6cf80f2 14160 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
14161 mask &= ~ALTIVEC_REG_BIT (i);
14162
14163 /* Similarly, remove the return value from the set. */
14164 {
14165 bool yes = false;
14166 diddle_return_value (is_altivec_return_reg, &yes);
14167 if (yes)
14168 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
14169 }
14170
14171 return mask;
14172}
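/* Illustration (assuming the usual definition of ALTIVEC_REG_BIT,
   where bit 0 of VRSAVE, the MSB, corresponds to V0): if only V20 and
   V31 survive the pruning above, the returned mask is

       (0x80000000 >> 20) | (0x80000000 >> 31) = 0x00000801.  */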
14173
d62294f5 14174/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
14175 size of prologues/epilogues by calling our own save/restore-the-world
14176 routines. */
d62294f5
FJ
14177
14178static void
f57fe068
AM
14179compute_save_world_info (rs6000_stack_t *info_ptr)
14180{
14181 info_ptr->world_save_p = 1;
14182 info_ptr->world_save_p
14183 = (WORLD_SAVE_P (info_ptr)
14184 && DEFAULT_ABI == ABI_DARWIN
14185 && ! (current_function_calls_setjmp && flag_exceptions)
14186 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
14187 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
14188 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
14189 && info_ptr->cr_save_p);
f676971a 14190
d62294f5
FJ
14191 /* This will not work in conjunction with sibcalls. Make sure there
14192 are none. (This check is expensive, but seldom executed.) */
f57fe068 14193 if (WORLD_SAVE_P (info_ptr))
f676971a 14194 {
d62294f5
FJ
14195 rtx insn;
14196 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
14197 if ( GET_CODE (insn) == CALL_INSN
14198 && SIBLING_CALL_P (insn))
14199 {
14200 info_ptr->world_save_p = 0;
14201 break;
14202 }
d62294f5 14203 }
f676971a 14204
f57fe068 14205 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
14206 {
14207 /* Even if we're not touching VRsave, make sure there's room on the
14208 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 14209 will attempt to save it. */
d62294f5
FJ
14210 info_ptr->vrsave_size = 4;
14211
14212 /* "Save" the VRsave register too if we're saving the world. */
14213 if (info_ptr->vrsave_mask == 0)
c4ad648e 14214 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
14215
14216 /* Because the Darwin register save/restore routines only handle
c4ad648e 14217 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 14218 check. */
37409796
NS
14219 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14220 && (info_ptr->first_altivec_reg_save
14221 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 14222 }
f676971a 14223 return;
d62294f5
FJ
14224}
14225
14226
00b960c7 14227static void
a2369ed3 14228is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
14229{
14230 bool *yes = (bool *) xyes;
14231 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14232 *yes = true;
14233}
14234
4697a36c
MM
14235\f
14236/* Calculate the stack information for the current function. This is
14237 complicated by having two separate calling sequences, the AIX calling
14238 sequence and the V.4 calling sequence.
14239
592696dd 14240 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 14241 32-bit 64-bit
4697a36c 14242 SP----> +---------------------------------------+
a260abc9 14243 | back chain to caller | 0 0
4697a36c 14244 +---------------------------------------+
a260abc9 14245 | saved CR | 4 8 (8-11)
4697a36c 14246 +---------------------------------------+
a260abc9 14247 | saved LR | 8 16
4697a36c 14248 +---------------------------------------+
a260abc9 14249 | reserved for compilers | 12 24
4697a36c 14250 +---------------------------------------+
a260abc9 14251 | reserved for binders | 16 32
4697a36c 14252 +---------------------------------------+
a260abc9 14253 | saved TOC pointer | 20 40
4697a36c 14254 +---------------------------------------+
a260abc9 14255 | Parameter save area (P) | 24 48
4697a36c 14256 +---------------------------------------+
a260abc9 14257 | Alloca space (A) | 24+P etc.
802a0058 14258 +---------------------------------------+
a7df97e6 14259 | Local variable space (L) | 24+P+A
4697a36c 14260 +---------------------------------------+
a7df97e6 14261 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 14262 +---------------------------------------+
00b960c7
AH
14263 | Save area for AltiVec registers (W) | 24+P+A+L+X
14264 +---------------------------------------+
14265 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
14266 +---------------------------------------+
14267 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 14268 +---------------------------------------+
00b960c7
AH
14269 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
14270 +---------------------------------------+
14271 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
4697a36c
MM
14272 +---------------------------------------+
14273 old SP->| back chain to caller's caller |
14274 +---------------------------------------+
14275
5376a30c
KR
14276 The required alignment for AIX configurations is two words (i.e., 8
14277 or 16 bytes).
14278
14279
4697a36c
MM
14280 V.4 stack frames look like:
14281
14282 SP----> +---------------------------------------+
14283 | back chain to caller | 0
14284 +---------------------------------------+
5eb387b8 14285 | caller's saved LR | 4
4697a36c
MM
14286 +---------------------------------------+
14287 | Parameter save area (P) | 8
14288 +---------------------------------------+
a7df97e6 14289 | Alloca space (A) | 8+P
f676971a 14290 +---------------------------------------+
a7df97e6 14291 | Varargs save area (V) | 8+P+A
f676971a 14292 +---------------------------------------+
a7df97e6 14293 | Local variable space (L) | 8+P+A+V
f676971a 14294 +---------------------------------------+
a7df97e6 14295 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 14296 +---------------------------------------+
00b960c7
AH
14297 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
14298 +---------------------------------------+
14299 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
14300 +---------------------------------------+
14301 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
14302 +---------------------------------------+
c4ad648e
AM
14303 | SPE: area for 64-bit GP registers |
14304 +---------------------------------------+
14305 | SPE alignment padding |
14306 +---------------------------------------+
00b960c7 14307 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 14308 +---------------------------------------+
00b960c7 14309 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 14310 +---------------------------------------+
00b960c7 14311 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
14312 +---------------------------------------+
14313 old SP->| back chain to caller's caller |
14314 +---------------------------------------+
b6c9286a 14315
5376a30c
KR
14316 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
14317 given. (But note below and in sysv4.h that we require only 8 and
14318 may round up the size of our stack frame anyways. The historical
14319 reason is early versions of powerpc-linux which didn't properly
14320 align the stack at program startup. A happy side-effect is that
14321 -mno-eabi libraries can be used with -meabi programs.)
14322
50d440bc 14323 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
14324 the stack alignment requirements may differ. If -mno-eabi is not
14325 given, the required stack alignment is 8 bytes; if -mno-eabi is
14326 given, the required alignment is 16 bytes. (But see V.4 comment
14327 above.) */
4697a36c 14328
61b2fbe7
MM
14329#ifndef ABI_STACK_BOUNDARY
14330#define ABI_STACK_BOUNDARY STACK_BOUNDARY
14331#endif
14332
d1d0c603 14333static rs6000_stack_t *
863d938c 14334rs6000_stack_info (void)
4697a36c 14335{
022123e6 14336 static rs6000_stack_t info;
4697a36c 14337 rs6000_stack_t *info_ptr = &info;
327e5343 14338 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 14339 int ehrd_size;
64045029 14340 int save_align;
8070c91a 14341 int first_gp;
44688022 14342 HOST_WIDE_INT non_fixed_size;
4697a36c 14343
022123e6 14344 memset (&info, 0, sizeof (info));
4697a36c 14345
c19de7aa
AH
14346 if (TARGET_SPE)
14347 {
14348 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 14349 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
14350 cfun->machine->insn_chain_scanned_p
14351 = spe_func_has_64bit_regs_p () + 1;
14352 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
14353 }
14354
a4f6c312 14355 /* Select which calling sequence. */
178274da 14356 info_ptr->abi = DEFAULT_ABI;
9878760c 14357
a4f6c312 14358 /* Calculate which registers need to be saved & save area size. */
4697a36c 14359 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 14360 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
14361 even if it currently looks like we won't. Reload may need it to
14362 get at a constant; if so, it will have already created a constant
14363 pool entry for it. */
2bfcf297 14364 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
14365 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14366 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
8070c91a 14367 && current_function_uses_const_pool
1db02437 14368 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 14369 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 14370 else
8070c91a
DJ
14371 first_gp = info_ptr->first_gp_reg_save;
14372
14373 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 14374
a3170dc6
AH
14375 /* For the SPE, we have an additional upper 32-bits on each GPR.
14376 Ideally we should save the entire 64-bits only when the upper
14377 half is used in SIMD instructions. Since we only record
14378 registers live (not the size they are used in), this proves
14379 difficult because we'd have to traverse the instruction chain at
14380 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
14381 so we opt to save the GPRs in 64-bits whenever even one register
14382 gets used in 64-bits. Otherwise, all the registers in the frame
14383 get saved in 32-bits.
a3170dc6 14384
c19de7aa 14385 So... since when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 14386 traditional GP save area will be empty. */
c19de7aa 14387 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14388 info_ptr->gp_size = 0;
14389
4697a36c
MM
14390 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14391 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14392
00b960c7
AH
14393 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14394 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14395 - info_ptr->first_altivec_reg_save);
14396
592696dd 14397 /* Does this function call anything? */
71f123ca
FS
14398 info_ptr->calls_p = (! current_function_is_leaf
14399 || cfun->machine->ra_needs_full_frame);
b6c9286a 14400
a4f6c312 14401 /* Determine if we need to save the link register. */
022123e6
AM
14402 if ((DEFAULT_ABI == ABI_AIX
14403 && current_function_profile
14404 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
14405#ifdef TARGET_RELOCATABLE
14406 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14407#endif
14408 || (info_ptr->first_fp_reg_save != 64
14409 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
00b960c7 14410 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
178274da 14411 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
022123e6
AM
14412 || info_ptr->calls_p
14413 || rs6000_ra_ever_killed ())
4697a36c
MM
14414 {
14415 info_ptr->lr_save_p = 1;
1de43f85 14416 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
14417 }
14418
9ebbca7d 14419 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
14420 if (df_regs_ever_live_p (CR2_REGNO)
14421 || df_regs_ever_live_p (CR3_REGNO)
14422 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
14423 {
14424 info_ptr->cr_save_p = 1;
178274da 14425 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
14426 info_ptr->cr_size = reg_size;
14427 }
14428
83720594
RH
14429 /* If the current function calls __builtin_eh_return, then we need
14430 to allocate stack space for registers that will hold data for
14431 the exception handler. */
14432 if (current_function_calls_eh_return)
14433 {
14434 unsigned int i;
14435 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14436 continue;
a3170dc6
AH
14437
14438 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14439 ehrd_size = i * (TARGET_SPE_ABI
14440 && info_ptr->spe_64bit_regs_used != 0
14441 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14442 }
14443 else
14444 ehrd_size = 0;
14445
592696dd 14446 /* Determine various sizes. */
4697a36c
MM
14447 info_ptr->reg_size = reg_size;
14448 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14449 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
a4f6c312 14450 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
03e007d7 14451 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14452 if (FRAME_GROWS_DOWNWARD)
14453 info_ptr->vars_size
5b667039
JJ
14454 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14455 + info_ptr->parm_size,
7d5175e1 14456 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14457 - (info_ptr->fixed_size + info_ptr->vars_size
14458 + info_ptr->parm_size);
00b960c7 14459
c19de7aa 14460 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 14461 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
14462 else
14463 info_ptr->spe_gp_size = 0;
14464
4d774ff8
HP
14465 if (TARGET_ALTIVEC_ABI)
14466 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14467 else
4d774ff8
HP
14468 info_ptr->vrsave_mask = 0;
14469
14470 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14471 info_ptr->vrsave_size = 4;
14472 else
14473 info_ptr->vrsave_size = 0;
b6c9286a 14474
d62294f5
FJ
14475 compute_save_world_info (info_ptr);
14476
592696dd 14477 /* Calculate the offsets. */
178274da 14478 switch (DEFAULT_ABI)
4697a36c 14479 {
b6c9286a 14480 case ABI_NONE:
24d304eb 14481 default:
37409796 14482 gcc_unreachable ();
b6c9286a
MM
14483
14484 case ABI_AIX:
ee890fe2 14485 case ABI_DARWIN:
b6c9286a
MM
14486 info_ptr->fp_save_offset = - info_ptr->fp_size;
14487 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14488
14489 if (TARGET_ALTIVEC_ABI)
14490 {
14491 info_ptr->vrsave_save_offset
14492 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14493
982afe02 14494 /* Align stack so vector save area is on a quadword boundary.
9278121c 14495 The padding goes above the vectors. */
00b960c7
AH
14496 if (info_ptr->altivec_size != 0)
14497 info_ptr->altivec_padding_size
9278121c 14498 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14499 else
14500 info_ptr->altivec_padding_size = 0;
14501
14502 info_ptr->altivec_save_offset
14503 = info_ptr->vrsave_save_offset
14504 - info_ptr->altivec_padding_size
14505 - info_ptr->altivec_size;
9278121c
GK
14506 gcc_assert (info_ptr->altivec_size == 0
14507 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
14508
14509 /* Adjust for AltiVec case. */
14510 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14511 }
14512 else
14513 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14514 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14515 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
14516 break;
14517
14518 case ABI_V4:
b6c9286a
MM
14519 info_ptr->fp_save_offset = - info_ptr->fp_size;
14520 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14521 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14522
c19de7aa 14523 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14524 {
14525 /* Align stack so SPE GPR save area is aligned on a
14526 double-word boundary. */
14527 if (info_ptr->spe_gp_size != 0)
14528 info_ptr->spe_padding_size
14529 = 8 - (-info_ptr->cr_save_offset % 8);
14530 else
14531 info_ptr->spe_padding_size = 0;
14532
14533 info_ptr->spe_gp_save_offset
14534 = info_ptr->cr_save_offset
14535 - info_ptr->spe_padding_size
14536 - info_ptr->spe_gp_size;
14537
14538 /* Adjust for SPE case. */
022123e6 14539 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14540 }
a3170dc6 14541 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14542 {
14543 info_ptr->vrsave_save_offset
14544 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14545
14546 /* Align stack so vector save area is on a quadword boundary. */
14547 if (info_ptr->altivec_size != 0)
14548 info_ptr->altivec_padding_size
14549 = 16 - (-info_ptr->vrsave_save_offset % 16);
14550 else
14551 info_ptr->altivec_padding_size = 0;
14552
14553 info_ptr->altivec_save_offset
14554 = info_ptr->vrsave_save_offset
14555 - info_ptr->altivec_padding_size
14556 - info_ptr->altivec_size;
14557
14558 /* Adjust for AltiVec case. */
022123e6 14559 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14560 }
14561 else
022123e6
AM
14562 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14563 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14564 info_ptr->lr_save_offset = reg_size;
14565 break;
4697a36c
MM
14566 }
14567
64045029 14568 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14569 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14570 + info_ptr->gp_size
14571 + info_ptr->altivec_size
14572 + info_ptr->altivec_padding_size
a3170dc6
AH
14573 + info_ptr->spe_gp_size
14574 + info_ptr->spe_padding_size
00b960c7
AH
14575 + ehrd_size
14576 + info_ptr->cr_size
022123e6 14577 + info_ptr->vrsave_size,
64045029 14578 save_align);
00b960c7 14579
44688022 14580 non_fixed_size = (info_ptr->vars_size
ff381587 14581 + info_ptr->parm_size
5b667039 14582 + info_ptr->save_size);
ff381587 14583
44688022
AM
14584 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14585 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14586
14587 /* Determine if we need to allocate any stack frame:
14588
a4f6c312
SS
14589 For AIX we need to push the stack if a frame pointer is needed
14590 (because the stack might be dynamically adjusted), if we are
14591 debugging, if we make calls, or if the sum of fp_save, gp_save,
14592 and local variables are more than the space needed to save all
14593 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14594 + 18*8 = 288 (GPR13 reserved).
ff381587 14595
a4f6c312
SS
14596 For V.4 we don't have the stack cushion that AIX uses, but assume
14597 that the debugger can handle stackless frames. */
ff381587
MM
14598
14599 if (info_ptr->calls_p)
14600 info_ptr->push_p = 1;
14601
178274da 14602 else if (DEFAULT_ABI == ABI_V4)
44688022 14603 info_ptr->push_p = non_fixed_size != 0;
ff381587 14604
178274da
AM
14605 else if (frame_pointer_needed)
14606 info_ptr->push_p = 1;
14607
14608 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14609 info_ptr->push_p = 1;
14610
ff381587 14611 else
44688022 14612 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 14613
a4f6c312 14614 /* Zero offsets if we're not saving those registers. */
8dda1a21 14615 if (info_ptr->fp_size == 0)
4697a36c
MM
14616 info_ptr->fp_save_offset = 0;
14617
8dda1a21 14618 if (info_ptr->gp_size == 0)
4697a36c
MM
14619 info_ptr->gp_save_offset = 0;
14620
00b960c7
AH
14621 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14622 info_ptr->altivec_save_offset = 0;
14623
14624 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14625 info_ptr->vrsave_save_offset = 0;
14626
c19de7aa
AH
14627 if (! TARGET_SPE_ABI
14628 || info_ptr->spe_64bit_regs_used == 0
14629 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
14630 info_ptr->spe_gp_save_offset = 0;
14631
c81fc13e 14632 if (! info_ptr->lr_save_p)
4697a36c
MM
14633 info_ptr->lr_save_offset = 0;
14634
c81fc13e 14635 if (! info_ptr->cr_save_p)
4697a36c
MM
14636 info_ptr->cr_save_offset = 0;
14637
14638 return info_ptr;
14639}
14640
c19de7aa
AH
14641/* Return true if the current function uses any GPRs in 64-bit SIMD
14642 mode. */
14643
14644static bool
863d938c 14645spe_func_has_64bit_regs_p (void)
c19de7aa
AH
14646{
14647 rtx insns, insn;
14648
14649 /* Functions that save and restore all the call-saved registers will
14650 need to save/restore the registers in 64-bits. */
14651 if (current_function_calls_eh_return
14652 || current_function_calls_setjmp
14653 || current_function_has_nonlocal_goto)
14654 return true;
14655
14656 insns = get_insns ();
14657
14658 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
14659 {
14660 if (INSN_P (insn))
14661 {
14662 rtx i;
14663
b5a5beb9
AH
14664 /* FIXME: This should be implemented with attributes...
14665
14666 (set_attr "spe64" "true")....then,
14667 if (get_spe64(insn)) return true;
14668
14669 It's the only reliable way to do the stuff below. */
14670
c19de7aa 14671 i = PATTERN (insn);
f82f556d
AH
14672 if (GET_CODE (i) == SET)
14673 {
14674 enum machine_mode mode = GET_MODE (SET_SRC (i));
14675
14676 if (SPE_VECTOR_MODE (mode))
14677 return true;
4d4447b5
PB
14678 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
14679 || mode == DDmode || mode == TDmode))
f82f556d
AH
14680 return true;
14681 }
c19de7aa
AH
14682 }
14683 }
14684
14685 return false;
14686}
14687
d1d0c603 14688static void
a2369ed3 14689debug_stack_info (rs6000_stack_t *info)
9878760c 14690{
d330fd93 14691 const char *abi_string;
24d304eb 14692
c81fc13e 14693 if (! info)
4697a36c
MM
14694 info = rs6000_stack_info ();
14695
14696 fprintf (stderr, "\nStack information for function %s:\n",
14697 ((current_function_decl && DECL_NAME (current_function_decl))
14698 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
14699 : "<unknown>"));
14700
24d304eb
RK
14701 switch (info->abi)
14702 {
b6c9286a
MM
14703 default: abi_string = "Unknown"; break;
14704 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 14705 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 14706 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 14707 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
14708 }
14709
14710 fprintf (stderr, "\tABI = %5s\n", abi_string);
14711
00b960c7
AH
14712 if (TARGET_ALTIVEC_ABI)
14713 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
14714
a3170dc6
AH
14715 if (TARGET_SPE_ABI)
14716 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
14717
4697a36c
MM
14718 if (info->first_gp_reg_save != 32)
14719 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
14720
14721 if (info->first_fp_reg_save != 64)
14722 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 14723
00b960c7
AH
14724 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
14725 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
14726 info->first_altivec_reg_save);
14727
4697a36c
MM
14728 if (info->lr_save_p)
14729 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 14730
4697a36c
MM
14731 if (info->cr_save_p)
14732 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
14733
00b960c7
AH
14734 if (info->vrsave_mask)
14735 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
14736
4697a36c
MM
14737 if (info->push_p)
14738 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
14739
14740 if (info->calls_p)
14741 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
14742
4697a36c
MM
14743 if (info->gp_save_offset)
14744 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
14745
14746 if (info->fp_save_offset)
14747 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
14748
00b960c7
AH
14749 if (info->altivec_save_offset)
14750 fprintf (stderr, "\taltivec_save_offset = %5d\n",
14751 info->altivec_save_offset);
14752
a3170dc6
AH
14753 if (info->spe_gp_save_offset)
14754 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
14755 info->spe_gp_save_offset);
14756
00b960c7
AH
14757 if (info->vrsave_save_offset)
14758 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
14759 info->vrsave_save_offset);
14760
4697a36c
MM
14761 if (info->lr_save_offset)
14762 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
14763
14764 if (info->cr_save_offset)
14765 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
14766
14767 if (info->varargs_save_offset)
14768 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
14769
14770 if (info->total_size)
d1d0c603
JJ
14771 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14772 info->total_size);
4697a36c 14773
4697a36c 14774 if (info->vars_size)
d1d0c603
JJ
14775 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14776 info->vars_size);
4697a36c
MM
14777
14778 if (info->parm_size)
14779 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
14780
14781 if (info->fixed_size)
14782 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
14783
14784 if (info->gp_size)
14785 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
14786
a3170dc6
AH
14787 if (info->spe_gp_size)
14788 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
14789
4697a36c
MM
14790 if (info->fp_size)
14791 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
14792
00b960c7
AH
14793 if (info->altivec_size)
14794 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
14795
14796 if (info->vrsave_size)
14797 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
14798
14799 if (info->altivec_padding_size)
14800 fprintf (stderr, "\taltivec_padding_size= %5d\n",
14801 info->altivec_padding_size);
14802
a3170dc6
AH
14803 if (info->spe_padding_size)
14804 fprintf (stderr, "\tspe_padding_size = %5d\n",
14805 info->spe_padding_size);
14806
4697a36c
MM
14807 if (info->cr_size)
14808 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
14809
14810 if (info->save_size)
14811 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
14812
14813 if (info->reg_size != 4)
14814 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
14815
14816 fprintf (stderr, "\n");
9878760c 14817}
71f123ca
FS
14818
14819rtx
a2369ed3 14820rs6000_return_addr (int count, rtx frame)
71f123ca 14821{
a4f6c312
SS
14822 /* Currently we don't optimize very well between prolog and body
14823 code and for PIC code the code can be actually quite bad, so
14824 don't try to be too clever here. */
f1384257 14825 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
14826 {
14827 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
14828
14829 return
14830 gen_rtx_MEM
14831 (Pmode,
14832 memory_address
14833 (Pmode,
14834 plus_constant (copy_to_reg
14835 (gen_rtx_MEM (Pmode,
14836 memory_address (Pmode, frame))),
14837 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
14838 }
14839
8c29550d 14840 cfun->machine->ra_need_lr = 1;
1de43f85 14841 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
14842}
14843
5e1bf043
DJ
14844/* Say whether a function is a candidate for sibcall handling or not.
14845 We do not allow indirect calls to be optimized into sibling calls.
14846 Also, we can't do it if there are any vector parameters; there's
14847 nowhere to put the VRsave code so it works; note that functions with
14848 vector parameters are required to have a prototype, so the argument
14849 type info must be available here. (The tail recursion case can work
14850 with vector parameters, but there's no way to distinguish here.) */
4977bab6 14851static bool
a2369ed3 14852rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
14853{
14854 tree type;
4977bab6 14855 if (decl)
5e1bf043
DJ
14856 {
14857 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 14858 {
4977bab6 14859 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
14860 type; type = TREE_CHAIN (type))
14861 {
c15b529f 14862 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 14863 return false;
5e1bf043 14864 }
c4ad648e 14865 }
5e1bf043 14866 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
14867 || ((*targetm.binds_local_p) (decl)
14868 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 14869 {
4977bab6 14870 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
14871
14872 if (!lookup_attribute ("longcall", attr_list)
14873 || lookup_attribute ("shortcall", attr_list))
4977bab6 14874 return true;
2bcc50d0 14875 }
5e1bf043 14876 }
4977bab6 14877 return false;
5e1bf043
DJ
14878}
14879
e7e64a25
AS
14880/* NULL if INSN is valid within a low-overhead loop.
14881 Otherwise return why doloop cannot be applied.
9419649c
DE
14882 PowerPC uses the COUNT register for branch on table instructions. */
14883
e7e64a25 14884static const char *
3101faab 14885rs6000_invalid_within_doloop (const_rtx insn)
9419649c
DE
14886{
14887 if (CALL_P (insn))
e7e64a25 14888 return "Function call in the loop.";
9419649c
DE
14889
14890 if (JUMP_P (insn)
14891 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
14892 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 14893 return "Computed branch in the loop.";
9419649c 14894
e7e64a25 14895 return NULL;
9419649c
DE
14896}
14897
71f123ca 14898static int
863d938c 14899rs6000_ra_ever_killed (void)
71f123ca
FS
14900{
14901 rtx top;
5e1bf043
DJ
14902 rtx reg;
14903 rtx insn;
71f123ca 14904
dd292d0a 14905 if (current_function_is_thunk)
71f123ca 14906 return 0;
eb0424da 14907
36f7e964
AH
14908 /* regs_ever_live has LR marked as used if any sibcalls are present,
14909 but this should not force saving and restoring in the
14910 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 14911 clobbers LR, so that is inappropriate. */
36f7e964 14912
5e1bf043
DJ
14913 /* Also, the prologue can generate a store into LR that
14914 doesn't really count, like this:
36f7e964 14915
5e1bf043
DJ
14916 move LR->R0
14917 bcl to set PIC register
14918 move LR->R31
14919 move R0->LR
36f7e964
AH
14920
14921 When we're called from the epilogue, we need to avoid counting
14922 this as a store. */
f676971a 14923
71f123ca
FS
14924 push_topmost_sequence ();
14925 top = get_insns ();
14926 pop_topmost_sequence ();
1de43f85 14927 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 14928
5e1bf043
DJ
14929 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
14930 {
14931 if (INSN_P (insn))
14932 {
022123e6
AM
14933 if (CALL_P (insn))
14934 {
14935 if (!SIBLING_CALL_P (insn))
14936 return 1;
14937 }
1de43f85 14938 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 14939 return 1;
36f7e964
AH
14940 else if (set_of (reg, insn) != NULL_RTX
14941 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
14942 return 1;
14943 }
14944 }
14945 return 0;
71f123ca 14946}
4697a36c 14947\f
9ebbca7d 14948/* Emit instructions needed to load the TOC register.
c7ca610e 14949 This is only needed when TARGET_TOC and TARGET_MINIMAL_TOC are set and
9ebbca7d 14950 there is a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
14951
14952void
a2369ed3 14953rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 14954{
6fb5fa3c 14955 rtx dest;
1db02437 14956 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 14957
7f970b70 14958 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 14959 {
7f970b70 14960 char buf[30];
e65a3857 14961 rtx lab, tmp1, tmp2, got;
7f970b70
AM
14962
14963 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14964 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14965 if (flag_pic == 2)
14966 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
14967 else
14968 got = rs6000_got_sym ();
14969 tmp1 = tmp2 = dest;
14970 if (!fromprolog)
14971 {
14972 tmp1 = gen_reg_rtx (Pmode);
14973 tmp2 = gen_reg_rtx (Pmode);
14974 }
6fb5fa3c
DB
14975 emit_insn (gen_load_toc_v4_PIC_1 (lab));
14976 emit_move_insn (tmp1,
1de43f85 14977 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
14978 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
14979 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
14980 }
14981 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
14982 {
6fb5fa3c 14983 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 14984 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
14985 }
14986 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
14987 {
14988 char buf[30];
20b71b17
AM
14989 rtx temp0 = (fromprolog
14990 ? gen_rtx_REG (Pmode, 0)
14991 : gen_reg_rtx (Pmode));
20b71b17 14992
20b71b17
AM
14993 if (fromprolog)
14994 {
ccbca5e4 14995 rtx symF, symL;
38c1f2d7 14996
20b71b17
AM
14997 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14998 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 14999
20b71b17
AM
15000 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
15001 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15002
6fb5fa3c
DB
15003 emit_insn (gen_load_toc_v4_PIC_1 (symF));
15004 emit_move_insn (dest,
1de43f85 15005 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 15006 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
15007 }
15008 else
20b71b17
AM
15009 {
15010 rtx tocsym;
20b71b17
AM
15011
15012 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
15013 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
15014 emit_move_insn (dest,
1de43f85 15015 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 15016 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 15017 }
6fb5fa3c 15018 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 15019 }
20b71b17
AM
15020 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
15021 {
15022 /* This is for AIX code running in non-PIC ELF32. */
15023 char buf[30];
15024 rtx realsym;
15025 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15026 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15027
6fb5fa3c
DB
15028 emit_insn (gen_elf_high (dest, realsym));
15029 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 15030 }
37409796 15031 else
9ebbca7d 15032 {
37409796 15033 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 15034
9ebbca7d 15035 if (TARGET_32BIT)
6fb5fa3c 15036 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 15037 else
6fb5fa3c 15038 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
15039 }
15040}
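/* To summarize the cases above: SECURE_PLT V.4 PIC uses the
   load_toc_v4_PIC_1/_3b/_3c patterns against the GOT symbol; V.4 -fpic
   uses load_toc_v4_pic_si; other ELF targets with -fPIC compute the TOC
   pointer from the LCF/LCL label difference; non-PIC ELF32 with
   -mminimal-toc materializes the TOC label with elf_high/elf_low; and
   AIX uses load_toc_aix_si or load_toc_aix_di.  */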
15041
d1d0c603
JJ
15042/* Emit instructions to restore the link register after determining where
15043 its value has been stored. */
15044
15045void
15046rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
15047{
15048 rs6000_stack_t *info = rs6000_stack_info ();
15049 rtx operands[2];
15050
15051 operands[0] = source;
15052 operands[1] = scratch;
15053
15054 if (info->lr_save_p)
15055 {
15056 rtx frame_rtx = stack_pointer_rtx;
15057 HOST_WIDE_INT sp_offset = 0;
15058 rtx tmp;
15059
15060 if (frame_pointer_needed
15061 || current_function_calls_alloca
15062 || info->total_size > 32767)
15063 {
0be76840 15064 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 15065 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
15066 frame_rtx = operands[1];
15067 }
15068 else if (info->push_p)
15069 sp_offset = info->total_size;
15070
15071 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 15072 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
15073 emit_move_insn (tmp, operands[0]);
15074 }
15075 else
1de43f85 15076 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
15077}
15078
4862826d 15079static GTY(()) alias_set_type set = -1;
f103e34d 15080
4862826d 15081alias_set_type
863d938c 15082get_TOC_alias_set (void)
9ebbca7d 15083{
f103e34d
GK
15084 if (set == -1)
15085 set = new_alias_set ();
15086 return set;
f676971a 15087}
9ebbca7d 15088
c1207243 15089/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
15090 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
15091 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 15092#if TARGET_ELF
3c9eb5f4 15093static int
f676971a 15094uses_TOC (void)
9ebbca7d 15095{
c4501e62 15096 rtx insn;
38c1f2d7 15097
c4501e62
JJ
15098 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
15099 if (INSN_P (insn))
15100 {
15101 rtx pat = PATTERN (insn);
15102 int i;
9ebbca7d 15103
f676971a 15104 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
15105 for (i = 0; i < XVECLEN (pat, 0); i++)
15106 {
15107 rtx sub = XVECEXP (pat, 0, i);
15108 if (GET_CODE (sub) == USE)
15109 {
15110 sub = XEXP (sub, 0);
15111 if (GET_CODE (sub) == UNSPEC
15112 && XINT (sub, 1) == UNSPEC_TOC)
15113 return 1;
15114 }
15115 }
15116 }
15117 return 0;
9ebbca7d 15118}
c954844a 15119#endif
38c1f2d7 15120
9ebbca7d 15121rtx
f676971a 15122create_TOC_reference (rtx symbol)
9ebbca7d 15123{
b3a13419 15124 if (!can_create_pseudo_p ())
6fb5fa3c 15125 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 15126 return gen_rtx_PLUS (Pmode,
a8a05998 15127 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a
EC
15128 gen_rtx_CONST (Pmode,
15129 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 15130 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 15131}
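/* For illustration, the reference built above for a symbol S has the form

       (plus:P (reg:P TOC_REGISTER)
               (const:P (minus:P S (symbol_ref:P toc_label_name))))

   i.e. the address of S expressed as an offset from the TOC base.  */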
38c1f2d7 15132
fc4767bb
JJ
15133/* If _Unwind_* has been called from within the same module,
15134 the TOC register is not guaranteed to be saved to 40(1) on function
15135 entry. Save it there in that case. */
c7ca610e 15136
9ebbca7d 15137void
863d938c 15138rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
15139{
15140 rtx mem;
15141 rtx stack_top = gen_reg_rtx (Pmode);
15142 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
15143 rtx opcode = gen_reg_rtx (SImode);
15144 rtx tocompare = gen_reg_rtx (SImode);
15145 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 15146
8308679f 15147 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
15148 emit_move_insn (stack_top, mem);
15149
8308679f
DE
15150 mem = gen_frame_mem (Pmode,
15151 gen_rtx_PLUS (Pmode, stack_top,
15152 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 15153 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
15154 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
15155 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 15156 : 0xE8410028, SImode));
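  /* The constants above appear to be the encodings of the caller's TOC
     reload after the call site: 0x80410014 is "lwz r2,20(r1)" (32-bit)
     and 0xE8410028 is "ld r2,40(r1)" (64-bit).  OPCODE is the instruction
     word found at the address loaded above (the saved return address on
     AIX); if it is not that reload, r2 is stored into the TOC save slot
     below.  */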
9ebbca7d 15157
fc4767bb 15158 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 15159 SImode, NULL_RTX, NULL_RTX,
fc4767bb 15160 no_toc_save_needed);
9ebbca7d 15161
8308679f
DE
15162 mem = gen_frame_mem (Pmode,
15163 gen_rtx_PLUS (Pmode, stack_top,
15164 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
15165 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
15166 emit_label (no_toc_save_needed);
9ebbca7d 15167}
38c1f2d7 15168\f
0be76840
DE
15169/* This ties together stack memory (MEM with an alias set of frame_alias_set)
15170 and the change to the stack pointer. */
ba4828e0 15171
9ebbca7d 15172static void
863d938c 15173rs6000_emit_stack_tie (void)
9ebbca7d 15174{
0be76840
DE
15175 rtx mem = gen_frame_mem (BLKmode,
15176 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 15177
9ebbca7d
GK
15178 emit_insn (gen_stack_tie (mem));
15179}
38c1f2d7 15180
9ebbca7d
GK
15181/* Emit the correct code for allocating stack space, as insns.
15182 If COPY_R12, make sure a copy of the old frame is left in r12.
15183 The generated code may use hard register 0 as a temporary. */
15184
15185static void
a2369ed3 15186rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
38c1f2d7 15187{
9ebbca7d
GK
15188 rtx insn;
15189 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15190 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
15191 rtx todec = gen_int_mode (-size, Pmode);
15192
15193 if (INTVAL (todec) != -size)
15194 {
d4ee4d25 15195 warning (0, "stack frame too large");
61168ff1
RS
15196 emit_insn (gen_trap ());
15197 return;
15198 }
a157febd
GK
15199
15200 if (current_function_limit_stack)
15201 {
15202 if (REG_P (stack_limit_rtx)
f676971a 15203 && REGNO (stack_limit_rtx) > 1
a157febd
GK
15204 && REGNO (stack_limit_rtx) <= 31)
15205 {
5b71a4e7 15206 emit_insn (TARGET_32BIT
9ebbca7d
GK
15207 ? gen_addsi3 (tmp_reg,
15208 stack_limit_rtx,
15209 GEN_INT (size))
15210 : gen_adddi3 (tmp_reg,
15211 stack_limit_rtx,
15212 GEN_INT (size)));
5b71a4e7 15213
9ebbca7d
GK
15214 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15215 const0_rtx));
a157febd
GK
15216 }
15217 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 15218 && TARGET_32BIT
f607bc57 15219 && DEFAULT_ABI == ABI_V4)
a157febd 15220 {
9ebbca7d 15221 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
15222 gen_rtx_PLUS (Pmode,
15223 stack_limit_rtx,
9ebbca7d 15224 GEN_INT (size)));
5b71a4e7 15225
9ebbca7d
GK
15226 emit_insn (gen_elf_high (tmp_reg, toload));
15227 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
15228 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15229 const0_rtx));
a157febd
GK
15230 }
15231 else
d4ee4d25 15232 warning (0, "stack limit expression is not supported");
a157febd
GK
15233 }
15234
9ebbca7d
GK
15235 if (copy_r12 || ! TARGET_UPDATE)
15236 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
15237
38c1f2d7
MM
15238 if (TARGET_UPDATE)
15239 {
9ebbca7d 15240 if (size > 32767)
38c1f2d7 15241 {
9ebbca7d 15242 /* Need a note here so that try_split doesn't get confused. */
9390387d 15243 if (get_last_insn () == NULL_RTX)
2e040219 15244 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
15245 insn = emit_move_insn (tmp_reg, todec);
15246 try_split (PATTERN (insn), insn, 0);
15247 todec = tmp_reg;
38c1f2d7 15248 }
5b71a4e7
DE
15249
15250 insn = emit_insn (TARGET_32BIT
15251 ? gen_movsi_update (stack_reg, stack_reg,
15252 todec, stack_reg)
c4ad648e 15253 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 15254 todec, stack_reg));
38c1f2d7
MM
15255 }
15256 else
15257 {
5b71a4e7
DE
15258 insn = emit_insn (TARGET_32BIT
15259 ? gen_addsi3 (stack_reg, stack_reg, todec)
15260 : gen_adddi3 (stack_reg, stack_reg, todec));
9ebbca7d
GK
15261 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
15262 gen_rtx_REG (Pmode, 12));
15263 }
f676971a 15264
9ebbca7d 15265 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 15266 REG_NOTES (insn) =
9ebbca7d 15267 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 15268 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
15269 gen_rtx_PLUS (Pmode, stack_reg,
15270 GEN_INT (-size))),
15271 REG_NOTES (insn));
15272}
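/* As a sketch of the usual result: with TARGET_UPDATE and a frame below
   32 KiB the allocation above becomes a single store-with-update of the
   back chain, roughly "stwu 1,-SIZE(1)" (stdu on 64-bit); larger frames
   first load -SIZE into r0 and use the register form, and without
   -mupdate the decrement and the back-chain store are two separate
   insns.  */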
15273
a4f6c312
SS
15274/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
15275 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
15276 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
15277 deduce these equivalences by itself so it wasn't necessary to hold
15278 its hand so much. */
9ebbca7d
GK
15279
15280static void
f676971a 15281rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 15282 rtx reg2, rtx rreg)
9ebbca7d
GK
15283{
15284 rtx real, temp;
15285
e56c4463
JL
15286 /* copy_rtx will not make unique copies of registers, so we need to
15287 ensure we don't have unwanted sharing here. */
15288 if (reg == reg2)
15289 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15290
15291 if (reg == rreg)
15292 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15293
9ebbca7d
GK
15294 real = copy_rtx (PATTERN (insn));
15295
89e7058f
AH
15296 if (reg2 != NULL_RTX)
15297 real = replace_rtx (real, reg2, rreg);
f676971a
EC
15298
15299 real = replace_rtx (real, reg,
9ebbca7d
GK
15300 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
15301 STACK_POINTER_REGNUM),
15302 GEN_INT (val)));
f676971a 15303
9ebbca7d
GK
15304 /* We expect that 'real' is either a SET or a PARALLEL containing
15305 SETs (and possibly other stuff). In a PARALLEL, all the SETs
15306 are important so they all have to be marked RTX_FRAME_RELATED_P. */
15307
15308 if (GET_CODE (real) == SET)
15309 {
15310 rtx set = real;
f676971a 15311
9ebbca7d
GK
15312 temp = simplify_rtx (SET_SRC (set));
15313 if (temp)
15314 SET_SRC (set) = temp;
15315 temp = simplify_rtx (SET_DEST (set));
15316 if (temp)
15317 SET_DEST (set) = temp;
15318 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 15319 {
9ebbca7d
GK
15320 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15321 if (temp)
15322 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 15323 }
38c1f2d7 15324 }
37409796 15325 else
9ebbca7d
GK
15326 {
15327 int i;
37409796
NS
15328
15329 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
15330 for (i = 0; i < XVECLEN (real, 0); i++)
15331 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
15332 {
15333 rtx set = XVECEXP (real, 0, i);
f676971a 15334
9ebbca7d
GK
15335 temp = simplify_rtx (SET_SRC (set));
15336 if (temp)
15337 SET_SRC (set) = temp;
15338 temp = simplify_rtx (SET_DEST (set));
15339 if (temp)
15340 SET_DEST (set) = temp;
15341 if (GET_CODE (SET_DEST (set)) == MEM)
15342 {
15343 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15344 if (temp)
15345 XEXP (SET_DEST (set), 0) = temp;
15346 }
15347 RTX_FRAME_RELATED_P (set) = 1;
15348 }
15349 }
c19de7aa
AH
15350
15351 if (TARGET_SPE)
15352 real = spe_synthesize_frame_save (real);
15353
9ebbca7d
GK
15354 RTX_FRAME_RELATED_P (insn) = 1;
15355 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15356 real,
15357 REG_NOTES (insn));
38c1f2d7
MM
15358}
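/* A worked example of the rewriting above: a save emitted through a copy
   of the old stack pointer, say

       (set (mem (plus (reg 12) (const_int OFF))) (reg 30))

   with REG = r12 and VAL = the frame size, ends up in the
   REG_FRAME_RELATED_EXPR note as

       (set (mem (plus (reg 1) (const_int VAL+OFF))) (reg 30))

   because replace_rtx substitutes (plus r1 VAL) for r12 and simplify_rtx
   folds the nested PLUS, expressing the slot relative to the stack
   pointer instead of the temporary copy.  */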
15359
c19de7aa
AH
15360/* Given an SPE frame note, return a PARALLEL of SETs with the
15361 original note, plus a synthetic register save. */
15362
15363static rtx
a2369ed3 15364spe_synthesize_frame_save (rtx real)
c19de7aa
AH
15365{
15366 rtx synth, offset, reg, real2;
15367
15368 if (GET_CODE (real) != SET
15369 || GET_MODE (SET_SRC (real)) != V2SImode)
15370 return real;
15371
15372 /* For the SPE, registers saved in 64 bits get a PARALLEL for their
15373 frame-related note. The parallel contains a set of the register
41f3a930 15374 being saved, and another set to a synthetic register (n+1200).
c19de7aa
AH
15375 This is so we can differentiate between 64-bit and 32-bit saves.
15376 Words cannot describe this nastiness. */
15377
37409796
NS
15378 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
15379 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
15380 && GET_CODE (SET_SRC (real)) == REG);
c19de7aa
AH
15381
15382 /* Transform:
15383 (set (mem (plus (reg x) (const y)))
15384 (reg z))
15385 into:
15386 (set (mem (plus (reg x) (const y+4)))
41f3a930 15387 (reg z+1200))
c19de7aa
AH
15388 */
15389
15390 real2 = copy_rtx (real);
15391 PUT_MODE (SET_DEST (real2), SImode);
15392 reg = SET_SRC (real2);
15393 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
15394 synth = copy_rtx (real2);
15395
15396 if (BYTES_BIG_ENDIAN)
15397 {
15398 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
15399 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
15400 }
15401
15402 reg = SET_SRC (synth);
41f3a930 15403
c19de7aa 15404 synth = replace_rtx (synth, reg,
41f3a930 15405 gen_rtx_REG (SImode, REGNO (reg) + 1200));
c19de7aa
AH
15406
15407 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
15408 synth = replace_rtx (synth, offset,
15409 GEN_INT (INTVAL (offset)
15410 + (BYTES_BIG_ENDIAN ? 0 : 4)));
15411
15412 RTX_FRAME_RELATED_P (synth) = 1;
15413 RTX_FRAME_RELATED_P (real2) = 1;
15414 if (BYTES_BIG_ENDIAN)
15415 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
15416 else
15417 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
15418
15419 return real;
15420}
15421
00b960c7
AH
15422/* Returns an insn that has a vrsave set operation with the
15423 appropriate CLOBBERs. */
15424
15425static rtx
a2369ed3 15426generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
15427{
15428 int nclobs, i;
15429 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 15430 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 15431
a004eb82
AH
15432 clobs[0]
15433 = gen_rtx_SET (VOIDmode,
15434 vrsave,
15435 gen_rtx_UNSPEC_VOLATILE (SImode,
15436 gen_rtvec (2, reg, vrsave),
3aca4bff 15437 UNSPECV_SET_VRSAVE));
00b960c7
AH
15438
15439 nclobs = 1;
15440
9aa86737
AH
15441 /* We need to clobber the registers in the mask so the scheduler
15442 does not move sets to VRSAVE before sets of AltiVec registers.
15443
15444 However, if the function receives nonlocal gotos, reload will set
15445 all call saved registers live. We will end up with:
15446
15447 (set (reg 999) (mem))
15448 (parallel [ (set (reg vrsave) (unspec blah))
15449 (clobber (reg 999))])
15450
15451 The clobber will cause the store into reg 999 to be dead, and
15452 flow will attempt to delete an epilogue insn. In this case, we
15453 need an unspec use/set of the register. */
00b960c7
AH
15454
15455 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 15456 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
15457 {
15458 if (!epiloguep || call_used_regs [i])
15459 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15460 gen_rtx_REG (V4SImode, i));
15461 else
15462 {
15463 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
15464
15465 clobs[nclobs++]
a004eb82
AH
15466 = gen_rtx_SET (VOIDmode,
15467 reg,
15468 gen_rtx_UNSPEC (V4SImode,
15469 gen_rtvec (1, reg), 27));
9aa86737
AH
15470 }
15471 }
00b960c7
AH
15472
15473 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15474
15475 for (i = 0; i < nclobs; ++i)
15476 XVECEXP (insn, 0, i) = clobs[i];
15477
15478 return insn;
15479}
15480
89e7058f
AH
15481/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15482 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
15483
15484static void
f676971a 15485emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 15486 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
15487{
15488 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15489 rtx replacea, replaceb;
15490
15491 int_rtx = GEN_INT (offset);
15492
15493 /* Some cases that need register indexed addressing. */
15494 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4d4447b5 15495 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == DDmode))
a3170dc6
AH
15496 || (TARGET_SPE_ABI
15497 && SPE_VECTOR_MODE (mode)
15498 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
15499 {
15500 /* Whoever calls us must make sure r11 is available in the
c4ad648e 15501 flow path of instructions in the prologue. */
89e7058f
AH
15502 offset_rtx = gen_rtx_REG (Pmode, 11);
15503 emit_move_insn (offset_rtx, int_rtx);
15504
15505 replacea = offset_rtx;
15506 replaceb = int_rtx;
15507 }
15508 else
15509 {
15510 offset_rtx = int_rtx;
15511 replacea = NULL_RTX;
15512 replaceb = NULL_RTX;
15513 }
15514
15515 reg = gen_rtx_REG (mode, regno);
15516 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 15517 mem = gen_frame_mem (mode, addr);
89e7058f
AH
15518
15519 insn = emit_move_insn (mem, reg);
15520
15521 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15522}
15523
a3170dc6
AH
15524/* Emit an offset memory reference suitable for a frame store, while
15525 converting to a valid addressing mode. */
15526
15527static rtx
a2369ed3 15528gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
15529{
15530 rtx int_rtx, offset_rtx;
15531
15532 int_rtx = GEN_INT (offset);
15533
4d4cbc0e 15534 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
4d4447b5 15535 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == DDmode)))
a3170dc6
AH
15536 {
15537 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15538 emit_move_insn (offset_rtx, int_rtx);
15539 }
15540 else
15541 offset_rtx = int_rtx;
15542
0be76840 15543 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
15544}
15545
6d0a8091
DJ
15546/* Look for user-defined global regs. We should not save and restore these,
15547 and cannot use stmw/lmw if any lie in the range those insns cover. */
15548
15549static bool
15550no_global_regs_above (int first_greg)
15551{
15552 int i;
15553 for (i = 0; i < 32 - first_greg; i++)
15554 if (global_regs[first_greg + i])
15555 return false;
15556 return true;
15557}
15558
699c914a
MS
15559#ifndef TARGET_FIX_AND_CONTINUE
15560#define TARGET_FIX_AND_CONTINUE 0
15561#endif
15562
52ff33d0
NF
15563/* Determine whether GP register REG is really used: it is live and call-saved, or it is the PIC offset table register and a GOT/TOC base is needed. */
15564
15565static bool
15566rs6000_reg_live_or_pic_offset_p (int reg)
15567{
6fb5fa3c 15568 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
15569 && (!call_used_regs[reg]
15570 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15571 && TARGET_TOC && TARGET_MINIMAL_TOC)))
15572 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15573 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
15574 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
15575}
15576
9ebbca7d
GK
15577/* Emit function prologue as insns. */
15578
9878760c 15579void
863d938c 15580rs6000_emit_prologue (void)
9878760c 15581{
4697a36c 15582 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 15583 enum machine_mode reg_mode = Pmode;
327e5343 15584 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
15585 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15586 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
15587 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 15588 rtx cr_save_rtx = NULL_RTX;
9ebbca7d
GK
15589 rtx insn;
15590 int saving_FPRs_inline;
15591 int using_store_multiple;
15592 HOST_WIDE_INT sp_offset = 0;
f676971a 15593
699c914a
MS
15594 if (TARGET_FIX_AND_CONTINUE)
15595 {
15596 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 15597 address by modifying the first 5 instructions of the function
699c914a
MS
15598 to branch to the overriding function. This is necessary to
15599 permit function pointers that point to the old function to
15600 actually forward to the new function. */
15601 emit_insn (gen_nop ());
15602 emit_insn (gen_nop ());
de2ab0ca 15603 emit_insn (gen_nop ());
699c914a
MS
15604 emit_insn (gen_nop ());
15605 emit_insn (gen_nop ());
15606 }
15607
15608 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
15609 {
15610 reg_mode = V2SImode;
15611 reg_size = 8;
15612 }
a3170dc6 15613
9ebbca7d 15614 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
15615 && (!TARGET_SPE_ABI
15616 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
15617 && info->first_gp_reg_save < 31
15618 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 15619 saving_FPRs_inline = (info->first_fp_reg_save == 64
8c29550d 15620 || FP_SAVE_INLINE (info->first_fp_reg_save)
acd0b319 15621 || current_function_calls_eh_return
8c29550d 15622 || cfun->machine->ra_need_lr);
9ebbca7d
GK
15623
15624 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
15625 if (! WORLD_SAVE_P (info)
15626 && info->push_p
acd0b319
AM
15627 && (DEFAULT_ABI == ABI_V4
15628 || current_function_calls_eh_return))
9ebbca7d
GK
15629 {
15630 if (info->total_size < 32767)
15631 sp_offset = info->total_size;
15632 else
15633 frame_reg_rtx = frame_ptr_rtx;
f676971a 15634 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
15635 (frame_reg_rtx != sp_reg_rtx
15636 && (info->cr_save_p
15637 || info->lr_save_p
15638 || info->first_fp_reg_save < 64
15639 || info->first_gp_reg_save < 32
15640 )));
15641 if (frame_reg_rtx != sp_reg_rtx)
15642 rs6000_emit_stack_tie ();
15643 }
15644
d62294f5 15645 /* Handle world saves specially here. */
f57fe068 15646 if (WORLD_SAVE_P (info))
d62294f5
FJ
15647 {
15648 int i, j, sz;
15649 rtx treg;
15650 rtvec p;
22fa69da 15651 rtx reg0;
d62294f5
FJ
15652
15653 /* save_world expects lr in r0. */
22fa69da 15654 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 15655 if (info->lr_save_p)
c4ad648e 15656 {
22fa69da 15657 insn = emit_move_insn (reg0,
1de43f85 15658 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
15659 RTX_FRAME_RELATED_P (insn) = 1;
15660 }
d62294f5
FJ
15661
15662 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 15663 assumptions about the offsets of various bits of the stack
992d08b1 15664 frame. */
37409796
NS
15665 gcc_assert (info->gp_save_offset == -220
15666 && info->fp_save_offset == -144
15667 && info->lr_save_offset == 8
15668 && info->cr_save_offset == 4
15669 && info->push_p
15670 && info->lr_save_p
15671 && (!current_function_calls_eh_return
15672 || info->ehrd_offset == -432)
15673 && info->vrsave_save_offset == -224
22fa69da 15674 && info->altivec_save_offset == -416);
d62294f5
FJ
15675
15676 treg = gen_rtx_REG (SImode, 11);
15677 emit_move_insn (treg, GEN_INT (-info->total_size));
15678
15679 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 15680 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
15681
15682 /* Preserve CR2 for save_world prologues */
22fa69da 15683 sz = 5;
d62294f5
FJ
15684 sz += 32 - info->first_gp_reg_save;
15685 sz += 64 - info->first_fp_reg_save;
15686 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
15687 p = rtvec_alloc (sz);
15688 j = 0;
15689 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 15690 gen_rtx_REG (SImode,
1de43f85 15691 LR_REGNO));
d62294f5 15692 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
15693 gen_rtx_SYMBOL_REF (Pmode,
15694 "*save_world"));
d62294f5 15695 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
15696 properly. */
15697 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15698 {
15699 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15700 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15701 GEN_INT (info->fp_save_offset
15702 + sp_offset + 8 * i));
0be76840 15703 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
15704
15705 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15706 }
d62294f5 15707 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
15708 {
15709 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
15710 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15711 GEN_INT (info->altivec_save_offset
15712 + sp_offset + 16 * i));
0be76840 15713 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
15714
15715 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15716 }
d62294f5 15717 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
15718 {
15719 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15720 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15721 GEN_INT (info->gp_save_offset
15722 + sp_offset + reg_size * i));
0be76840 15723 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15724
15725 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15726 }
15727
15728 {
15729 /* CR register traditionally saved as CR2. */
15730 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
15731 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15732 GEN_INT (info->cr_save_offset
15733 + sp_offset));
0be76840 15734 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15735
15736 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15737 }
22fa69da
GK
15738 /* Explain about use of R0. */
15739 if (info->lr_save_p)
15740 {
15741 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15742 GEN_INT (info->lr_save_offset
15743 + sp_offset));
15744 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 15745
22fa69da
GK
15746 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
15747 }
15748 /* Explain what happens to the stack pointer. */
15749 {
15750 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
15751 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
15752 }
d62294f5
FJ
15753
15754 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
15755 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
15756 treg, GEN_INT (-info->total_size));
15757 sp_offset = info->total_size;
d62294f5
FJ
15758 }
15759
9ebbca7d 15760 /* If we use the link register, get it into r0. */
f57fe068 15761 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 15762 {
52ff33d0
NF
15763 rtx addr, reg, mem;
15764
f8a57be8 15765 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 15766 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 15767 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
15768
15769 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15770 GEN_INT (info->lr_save_offset + sp_offset));
15771 reg = gen_rtx_REG (Pmode, 0);
15772 mem = gen_rtx_MEM (Pmode, addr);
15773 /* This should not be of rs6000_sr_alias_set, because of
15774 __builtin_return_address. */
15775
15776 insn = emit_move_insn (mem, reg);
15777 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15778 NULL_RTX, NULL_RTX);
f8a57be8 15779 }
9ebbca7d
GK
15780
15781 /* If we need to save CR, put it into r12. */
f57fe068 15782 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 15783 {
f8a57be8 15784 rtx set;
f676971a 15785
9ebbca7d 15786 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
15787 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15788 RTX_FRAME_RELATED_P (insn) = 1;
15789 /* Now, there's no way that dwarf2out_frame_debug_expr is going
15790 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
15791 But that's OK. All we have to do is specify that _one_ condition
15792 code register is saved in this stack slot. The thrower's epilogue
15793 will then restore all the call-saved registers.
15794 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
15795 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
15796 gen_rtx_REG (SImode, CR2_REGNO));
15797 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15798 set,
15799 REG_NOTES (insn));
9ebbca7d
GK
15800 }
15801
a4f6c312
SS
15802 /* Do any required saving of fpr's. If only one or two to save, do
15803 it ourselves. Otherwise, call function. */
f57fe068 15804 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
15805 {
15806 int i;
15807 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 15808 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 15809 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
15810 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
15811 info->first_fp_reg_save + i,
15812 info->fp_save_offset + sp_offset + 8 * i,
15813 info->total_size);
9ebbca7d 15814 }
f57fe068 15815 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
9ebbca7d
GK
15816 {
15817 int i;
15818 char rname[30];
520a57c8 15819 const char *alloc_rname;
9ebbca7d
GK
15820 rtvec p;
15821 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
f676971a
EC
15822
15823 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
15824 gen_rtx_REG (Pmode,
1de43f85 15825 LR_REGNO));
9ebbca7d
GK
15826 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
15827 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
a8a05998 15828 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
15829 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15830 gen_rtx_SYMBOL_REF (Pmode,
15831 alloc_rname));
15832 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15833 {
15834 rtx addr, reg, mem;
15835 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15836 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a 15837 GEN_INT (info->fp_save_offset
9ebbca7d 15838 + sp_offset + 8*i));
0be76840 15839 mem = gen_frame_mem (DFmode, addr);
9ebbca7d
GK
15840
15841 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
15842 }
15843 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15844 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
15845 NULL_RTX, NULL_RTX);
15846 }
b6c9286a 15847
9ebbca7d
GK
15848 /* Save GPRs. This is done as a PARALLEL if we are using
15849 the store-multiple instructions. */
f57fe068 15850 if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 15851 {
308c142a 15852 rtvec p;
9ebbca7d
GK
15853 int i;
15854 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
15855 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15856 {
15857 rtx addr, reg, mem;
15858 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
15859 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15860 GEN_INT (info->gp_save_offset
15861 + sp_offset
9ebbca7d 15862 + reg_size * i));
0be76840 15863 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
15864
15865 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
15866 }
15867 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15868 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 15869 NULL_RTX, NULL_RTX);
b6c9286a 15870 }
52ff33d0
NF
15871 else if (!WORLD_SAVE_P (info)
15872 && TARGET_SPE_ABI
15873 && info->spe_64bit_regs_used != 0
15874 && info->first_gp_reg_save != 32)
15875 {
15876 int i;
15877 rtx spe_save_area_ptr;
15878 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
6fb5fa3c 15879 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
52ff33d0
NF
15880 && !call_used_regs[STATIC_CHAIN_REGNUM]);
15881
15882 /* Determine whether we can address all of the registers that need
15883 to be saved with an offset from the stack pointer that fits in
15884 the small const field for SPE memory instructions. */
15885 int spe_regs_addressable_via_sp
15886 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
15887 + (32 - info->first_gp_reg_save - 1) * reg_size);
15888 int spe_offset;
15889
15890 if (spe_regs_addressable_via_sp)
15891 {
30895f30 15892 spe_save_area_ptr = frame_reg_rtx;
52ff33d0
NF
15893 spe_offset = info->spe_gp_save_offset + sp_offset;
15894 }
15895 else
15896 {
15897 /* Make r11 point to the start of the SPE save area. We need
15898 to be careful here if r11 is holding the static chain. If
15899 it is, then temporarily save it in r0. We would use r0 as
15900 our base register here, but using r0 as a base register in
15901 loads and stores means something different from what we
15902 would like. */
15903 if (using_static_chain_p)
15904 {
15905 rtx r0 = gen_rtx_REG (Pmode, 0);
15906
15907 gcc_assert (info->first_gp_reg_save > 11);
15908
15909 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
15910 }
15911
15912 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
30895f30 15913 emit_insn (gen_addsi3 (spe_save_area_ptr, frame_reg_rtx,
52ff33d0
NF
15914 GEN_INT (info->spe_gp_save_offset + sp_offset)));
15915
15916 spe_offset = 0;
15917 }
15918
15919 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15920 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15921 {
15922 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15923 rtx offset, addr, mem;
15924
15925 /* We're doing all this to ensure that the offset fits into
15926 the immediate offset of 'evstdd'. */
15927 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
15928
15929 offset = GEN_INT (reg_size * i + spe_offset);
15930 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
15931 mem = gen_rtx_MEM (V2SImode, addr);
15932
15933 insn = emit_move_insn (mem, reg);
15934
15935 rs6000_frame_related (insn, spe_save_area_ptr,
15936 info->spe_gp_save_offset
15937 + sp_offset + reg_size * i,
15938 offset, const0_rtx);
15939 }
15940
15941 /* Move the static chain pointer back. */
15942 if (using_static_chain_p && !spe_regs_addressable_via_sp)
15943 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
15944 }
f57fe068 15945 else if (!WORLD_SAVE_P (info))
b6c9286a 15946 {
9ebbca7d
GK
15947 int i;
15948 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
15949 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15950 {
15951 rtx addr, reg, mem;
15952 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 15953
52ff33d0
NF
15954 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15955 GEN_INT (info->gp_save_offset
15956 + sp_offset
15957 + reg_size * i));
15958 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 15959
52ff33d0
NF
15960 insn = emit_move_insn (mem, reg);
15961 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15962 NULL_RTX, NULL_RTX);
15963 }
9ebbca7d
GK
15964 }
15965
83720594
RH
15966 /* ??? There's no need to emit actual instructions here, but it's the
15967 easiest way to get the frame unwind information emitted. */
22fa69da 15968 if (current_function_calls_eh_return)
83720594 15969 {
78e1b90d
DE
15970 unsigned int i, regno;
15971
fc4767bb
JJ
15972 /* In AIX ABI we need to pretend we save r2 here. */
15973 if (TARGET_AIX)
15974 {
15975 rtx addr, reg, mem;
15976
15977 reg = gen_rtx_REG (reg_mode, 2);
15978 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15979 GEN_INT (sp_offset + 5 * reg_size));
0be76840 15980 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
15981
15982 insn = emit_move_insn (mem, reg);
f676971a 15983 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
15984 NULL_RTX, NULL_RTX);
15985 PATTERN (insn) = gen_blockage ();
15986 }
15987
83720594
RH
15988 for (i = 0; ; ++i)
15989 {
83720594
RH
15990 regno = EH_RETURN_DATA_REGNO (i);
15991 if (regno == INVALID_REGNUM)
15992 break;
15993
89e7058f
AH
15994 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
15995 info->ehrd_offset + sp_offset
15996 + reg_size * (int) i,
15997 info->total_size);
83720594
RH
15998 }
15999 }
16000
9ebbca7d 16001 /* Save CR if we use any that must be preserved. */
f57fe068 16002 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
16003 {
16004 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16005 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16006 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
16007 /* See the large comment above about why CR2_REGNO is used. */
16008 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 16009
9ebbca7d
GK
16010 /* If r12 was used to hold the original sp, copy cr into r0 now
16011 that it's free. */
16012 if (REGNO (frame_reg_rtx) == 12)
16013 {
f8a57be8
GK
16014 rtx set;
16015
9ebbca7d 16016 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
16017 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16018 RTX_FRAME_RELATED_P (insn) = 1;
16019 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
16020 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
16021 set,
16022 REG_NOTES (insn));
f676971a 16023
9ebbca7d
GK
16024 }
16025 insn = emit_move_insn (mem, cr_save_rtx);
16026
f676971a 16027 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 16028 NULL_RTX, NULL_RTX);
9ebbca7d
GK
16029 }
16030
f676971a 16031 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 16032 for which it was done previously. */
f57fe068 16033 if (!WORLD_SAVE_P (info) && info->push_p
fc4767bb 16034 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
2b2c2fe5 16035 {
bcb2d701 16036 if (info->total_size < 32767)
2b2c2fe5 16037 sp_offset = info->total_size;
bcb2d701
EC
16038 else
16039 frame_reg_rtx = frame_ptr_rtx;
16040 rs6000_emit_allocate_stack (info->total_size,
16041 (frame_reg_rtx != sp_reg_rtx
16042 && ((info->altivec_size != 0)
16043 || (info->vrsave_mask != 0)
16044 )));
16045 if (frame_reg_rtx != sp_reg_rtx)
16046 rs6000_emit_stack_tie ();
2b2c2fe5 16047 }
9ebbca7d
GK
16048
16049 /* Set frame pointer, if needed. */
16050 if (frame_pointer_needed)
16051 {
7d5175e1 16052 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
16053 sp_reg_rtx);
16054 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 16055 }
9878760c 16056
2b2c2fe5
EC
16057 /* Save AltiVec registers if needed. Save here because the red zone does
16058 not include AltiVec registers. */
16059 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16060 {
16061 int i;
16062
16063 /* There should be a non-inline version of this, for when we
16064 are saving lots of vector registers. */
16065 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16066 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16067 {
16068 rtx areg, savereg, mem;
16069 int offset;
16070
16071 offset = info->altivec_save_offset + sp_offset
16072 + 16 * (i - info->first_altivec_reg_save);
16073
16074 savereg = gen_rtx_REG (V4SImode, i);
16075
16076 areg = gen_rtx_REG (Pmode, 0);
16077 emit_move_insn (areg, GEN_INT (offset));
16078
16079 /* AltiVec addressing mode is [reg+reg]. */
16080 mem = gen_frame_mem (V4SImode,
16081 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
16082
16083 insn = emit_move_insn (mem, savereg);
16084
16085 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16086 areg, GEN_INT (offset));
16087 }
16088 }
16089
16090 /* VRSAVE is a bit vector representing which AltiVec registers
16091 are used. The OS uses this to determine which vector
16092 registers to save on a context switch. We need to save
16093 VRSAVE on the stack frame, add whatever AltiVec registers we
16094 used in this function, and do the corresponding magic in the
16095 epilogue. */
16096
16097 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16098 && info->vrsave_mask != 0)
16099 {
16100 rtx reg, mem, vrsave;
16101 int offset;
16102
16103 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
16104 as frame_reg_rtx and r11 as the static chain pointer for
16105 nested functions. */
16106 reg = gen_rtx_REG (SImode, 0);
16107 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
16108 if (TARGET_MACHO)
16109 emit_insn (gen_get_vrsave_internal (reg));
16110 else
16111 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
16112
16113 if (!WORLD_SAVE_P (info))
16114 {
16115 /* Save VRSAVE. */
16116 offset = info->vrsave_save_offset + sp_offset;
16117 mem = gen_frame_mem (SImode,
16118 gen_rtx_PLUS (Pmode, frame_reg_rtx,
16119 GEN_INT (offset)));
16120 insn = emit_move_insn (mem, reg);
16121 }
16122
16123 /* Include the registers in the mask. */
16124 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
16125
16126 insn = emit_insn (generate_set_vrsave (reg, info, 0));
16127 }
16128
1db02437 16129 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 16130 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
16131 || (DEFAULT_ABI == ABI_V4
16132 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 16133 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
16134 {
16135 /* If emit_load_toc_table will use the link register, we need to save
16136 it. We use R12 for this purpose because emit_load_toc_table
16137 can use register 0. This allows us to use a plain 'blr' to return
16138 from the procedure more often. */
16139 int save_LR_around_toc_setup = (TARGET_ELF
16140 && DEFAULT_ABI != ABI_AIX
16141 && flag_pic
16142 && ! info->lr_save_p
16143 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
16144 if (save_LR_around_toc_setup)
16145 {
1de43f85 16146 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 16147
c4ad648e 16148 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 16149 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 16150
c4ad648e 16151 rs6000_emit_load_toc_table (TRUE);
f8a57be8 16152
c4ad648e 16153 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
16154 RTX_FRAME_RELATED_P (insn) = 1;
16155 }
16156 else
16157 rs6000_emit_load_toc_table (TRUE);
16158 }
ee890fe2 16159
fcce224d 16160#if TARGET_MACHO
ee890fe2
SS
16161 if (DEFAULT_ABI == ABI_DARWIN
16162 && flag_pic && current_function_uses_pic_offset_table)
16163 {
1de43f85 16164 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
11abc112 16165 rtx src = machopic_function_base_sym ();
ee890fe2 16166
6d0a8091
DJ
16167 /* Save and restore LR locally around this call (in R0). */
16168 if (!info->lr_save_p)
6fb5fa3c 16169 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 16170
6fb5fa3c 16171 emit_insn (gen_load_macho_picbase (src));
ee890fe2 16172
6fb5fa3c
DB
16173 emit_move_insn (gen_rtx_REG (Pmode,
16174 RS6000_PIC_OFFSET_TABLE_REGNUM),
16175 lr);
6d0a8091
DJ
16176
16177 if (!info->lr_save_p)
6fb5fa3c 16178 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 16179 }
fcce224d 16180#endif
9ebbca7d
GK
16181}
16182
9ebbca7d 16183/* Write function prologue. */
a4f6c312 16184
08c148a8 16185static void
f676971a 16186rs6000_output_function_prologue (FILE *file,
a2369ed3 16187 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
16188{
16189 rs6000_stack_t *info = rs6000_stack_info ();
16190
4697a36c
MM
16191 if (TARGET_DEBUG_STACK)
16192 debug_stack_info (info);
9878760c 16193
a4f6c312
SS
16194 /* Write .extern for any function we will call to save and restore
16195 fp values. */
16196 if (info->first_fp_reg_save < 64
16197 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 16198 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 16199 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
a4f6c312
SS
16200 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
16201 RESTORE_FP_SUFFIX);
9878760c 16202
c764f757
RK
16203 /* Write .extern for AIX common mode routines, if needed. */
16204 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
16205 {
f6709c70
JW
16206 fputs ("\t.extern __mulh\n", file);
16207 fputs ("\t.extern __mull\n", file);
16208 fputs ("\t.extern __divss\n", file);
16209 fputs ("\t.extern __divus\n", file);
16210 fputs ("\t.extern __quoss\n", file);
16211 fputs ("\t.extern __quous\n", file);
c764f757
RK
16212 common_mode_defined = 1;
16213 }
9878760c 16214
9ebbca7d 16215 if (! HAVE_prologue)
979721f8 16216 {
9ebbca7d 16217 start_sequence ();
9dda4cc8 16218
a4f6c312
SS
16219 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
16220 the "toplevel" insn chain. */
2e040219 16221 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16222 rs6000_emit_prologue ();
2e040219 16223 emit_note (NOTE_INSN_DELETED);
178c3eff 16224
a3c9585f 16225 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16226 {
16227 rtx insn;
16228 unsigned addr = 0;
16229 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16230 {
16231 INSN_ADDRESSES_NEW (insn, addr);
16232 addr += 4;
16233 }
16234 }
9dda4cc8 16235
9ebbca7d 16236 if (TARGET_DEBUG_STACK)
a4f6c312 16237 debug_rtx_list (get_insns (), 100);
c9d691e9 16238 final (get_insns (), file, FALSE);
9ebbca7d 16239 end_sequence ();
979721f8
MM
16240 }
16241
9ebbca7d
GK
16242 rs6000_pic_labelno++;
16243}
f676971a 16244
9ebbca7d 16245/* Emit function epilogue as insns.
9878760c 16246
9ebbca7d
GK
16247 At present, dwarf2out_frame_debug_expr doesn't understand
16248 register restores, so we don't bother setting RTX_FRAME_RELATED_P
16249 anywhere in the epilogue. Most of the insns below would in any case
16250 need special notes to explain where r11 is in relation to the stack. */
9878760c 16251
9ebbca7d 16252void
a2369ed3 16253rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
16254{
16255 rs6000_stack_t *info;
16256 int restoring_FPRs_inline;
16257 int using_load_multiple;
d296e02e 16258 int using_mtcr_multiple;
9ebbca7d
GK
16259 int use_backchain_to_restore_sp;
16260 int sp_offset = 0;
16261 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
16262 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 16263 enum machine_mode reg_mode = Pmode;
327e5343 16264 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16265 int i;
16266
c19de7aa
AH
16267 info = rs6000_stack_info ();
16268
16269 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
16270 {
16271 reg_mode = V2SImode;
16272 reg_size = 8;
16273 }
16274
9ebbca7d 16275 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
16276 && (!TARGET_SPE_ABI
16277 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
16278 && info->first_gp_reg_save < 31
16279 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 16280 restoring_FPRs_inline = (sibcall
83720594 16281 || current_function_calls_eh_return
9ebbca7d
GK
16282 || info->first_fp_reg_save == 64
16283 || FP_SAVE_INLINE (info->first_fp_reg_save));
f676971a 16284 use_backchain_to_restore_sp = (frame_pointer_needed
9ebbca7d
GK
16285 || current_function_calls_alloca
16286 || info->total_size > 32767);
d296e02e 16287 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
16288 || rs6000_cpu == PROCESSOR_PPC603
16289 || rs6000_cpu == PROCESSOR_PPC750
16290 || optimize_size);
16291
f57fe068 16292 if (WORLD_SAVE_P (info))
d62294f5
FJ
16293 {
16294 int i, j;
16295 char rname[30];
16296 const char *alloc_rname;
16297 rtvec p;
16298
16299 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
16300 stack slot (which is not likely to be our caller).
16301 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
16302 rest_world is similar, except any R10 parameter is ignored.
16303 The exception-handling stuff that was here in 2.95 is no
16304 longer necessary. */
d62294f5
FJ
16305
16306 p = rtvec_alloc (9
16307 + 1
f676971a 16308 + 32 - info->first_gp_reg_save
c4ad648e
AM
16309 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
16310 + 63 + 1 - info->first_fp_reg_save);
d62294f5 16311
c4ad648e
AM
16312 strcpy (rname, ((current_function_calls_eh_return) ?
16313 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
16314 alloc_rname = ggc_strdup (rname);
16315
16316 j = 0;
16317 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
16318 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 16319 gen_rtx_REG (Pmode,
1de43f85 16320 LR_REGNO));
d62294f5 16321 RTVEC_ELT (p, j++)
c4ad648e 16322 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 16323 /* The instruction pattern requires a clobber here;
c4ad648e 16324 it is shared with the restVEC helper. */
d62294f5 16325 RTVEC_ELT (p, j++)
c4ad648e 16326 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
16327
16328 {
c4ad648e
AM
16329 /* CR register traditionally saved as CR2. */
16330 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16331 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16332 GEN_INT (info->cr_save_offset));
0be76840 16333 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16334
16335 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
16336 }
16337
16338 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16339 {
16340 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16341 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16342 GEN_INT (info->gp_save_offset
16343 + reg_size * i));
0be76840 16344 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16345
16346 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16347 }
d62294f5 16348 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16349 {
16350 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16351 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16352 GEN_INT (info->altivec_save_offset
16353 + 16 * i));
0be76840 16354 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16355
16356 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16357 }
d62294f5 16358 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
16359 {
16360 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
16361 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16362 GEN_INT (info->fp_save_offset
16363 + 8 * i));
0be76840 16364 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
16365
16366 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16367 }
d62294f5 16368 RTVEC_ELT (p, j++)
c4ad648e 16369 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 16370 RTVEC_ELT (p, j++)
c4ad648e 16371 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 16372 RTVEC_ELT (p, j++)
c4ad648e 16373 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 16374 RTVEC_ELT (p, j++)
c4ad648e 16375 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 16376 RTVEC_ELT (p, j++)
c4ad648e 16377 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
16378 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
16379
16380 return;
16381 }
16382
45b194f8
AM
16383 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
16384 if (info->push_p)
2b2c2fe5 16385 sp_offset = info->total_size;
f676971a 16386
9aa86737
AH
16387 /* Restore AltiVec registers if needed. */
16388 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16389 {
16390 int i;
16391
16392 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16393 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16394 {
16395 rtx addr, areg, mem;
16396
16397 areg = gen_rtx_REG (Pmode, 0);
16398 emit_move_insn
16399 (areg, GEN_INT (info->altivec_save_offset
16400 + sp_offset
16401 + 16 * (i - info->first_altivec_reg_save)));
16402
16403 /* AltiVec addressing mode is [reg+reg]. */
16404 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 16405 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
16406
16407 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
16408 }
16409 }
16410
2b2c2fe5
EC
16411 /* If we have a frame pointer, a call to alloca, or a large stack
16412 frame, restore the old stack pointer using the backchain. Otherwise,
16413 we know what size to update it with. */
16414 if (use_backchain_to_restore_sp)
16415 {
16416 /* Under V.4, don't reset the stack pointer until after we're done
16417 loading the saved registers. */
16418 if (DEFAULT_ABI == ABI_V4)
16419 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16420
16421 emit_move_insn (frame_reg_rtx,
16422 gen_rtx_MEM (Pmode, sp_reg_rtx));
45b194f8 16423 sp_offset = 0;
2b2c2fe5 16424 }
45b194f8
AM
16425 else if (info->push_p
16426 && DEFAULT_ABI != ABI_V4
16427 && !current_function_calls_eh_return)
2b2c2fe5 16428 {
45b194f8
AM
16429 emit_insn (TARGET_32BIT
16430 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16431 GEN_INT (info->total_size))
16432 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16433 GEN_INT (info->total_size)));
16434 sp_offset = 0;
2b2c2fe5
EC
16435 }
16436
554c2941
AM
16437 /* Restore VRSAVE if needed. */
16438 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16439 && info->vrsave_mask != 0)
16440 {
16441 rtx addr, mem, reg;
16442
16443 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16444 GEN_INT (info->vrsave_save_offset + sp_offset));
16445 mem = gen_frame_mem (SImode, addr);
16446 reg = gen_rtx_REG (SImode, 12);
16447 emit_move_insn (reg, mem);
16448
16449 emit_insn (generate_set_vrsave (reg, info, 1));
16450 }
16451
9ebbca7d
GK
16452 /* Get the old lr if we saved it. */
16453 if (info->lr_save_p)
b6c9286a 16454 {
a3170dc6
AH
16455 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
16456 info->lr_save_offset + sp_offset);
ba4828e0 16457
9ebbca7d 16458 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 16459 }
f676971a 16460
9ebbca7d
GK
16461 /* Get the old cr if we saved it. */
16462 if (info->cr_save_p)
16463 {
16464 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16465 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16466 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 16467
9ebbca7d
GK
16468 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
16469 }
f676971a 16470
9ebbca7d 16471 /* Set LR here to try to overlap restores below. */
4697a36c 16472 if (info->lr_save_p)
1de43f85 16473 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 16474 gen_rtx_REG (Pmode, 0));
f676971a 16475
83720594
RH
16476 /* Load exception handler data registers, if needed. */
16477 if (current_function_calls_eh_return)
16478 {
78e1b90d
DE
16479 unsigned int i, regno;
16480
fc4767bb
JJ
16481 if (TARGET_AIX)
16482 {
16483 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16484 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16485 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16486
16487 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
16488 }
16489
83720594
RH
16490 for (i = 0; ; ++i)
16491 {
a3170dc6 16492 rtx mem;
83720594
RH
16493
16494 regno = EH_RETURN_DATA_REGNO (i);
16495 if (regno == INVALID_REGNUM)
16496 break;
16497
a3170dc6
AH
16498 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
16499 info->ehrd_offset + sp_offset
16500 + reg_size * (int) i);
83720594
RH
16501
16502 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
16503 }
16504 }
f676971a 16505
9ebbca7d
GK
16506 /* Restore GPRs. This is done as a PARALLEL if we are using
16507 the load-multiple instructions. */
16508 if (using_load_multiple)
979721f8 16509 {
9ebbca7d
GK
16510 rtvec p;
16511 p = rtvec_alloc (32 - info->first_gp_reg_save);
16512 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
979721f8 16513 {
f676971a
EC
16514 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16515 GEN_INT (info->gp_save_offset
16516 + sp_offset
9ebbca7d 16517 + reg_size * i));
0be76840 16518 rtx mem = gen_frame_mem (reg_mode, addr);
9ebbca7d 16519
f676971a 16520 RTVEC_ELT (p, i) =
9ebbca7d
GK
16521 gen_rtx_SET (VOIDmode,
16522 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16523 mem);
979721f8 16524 }
9ebbca7d 16525 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
979721f8 16526 }
52ff33d0
NF
16527 else if (TARGET_SPE_ABI
16528 && info->spe_64bit_regs_used != 0
16529 && info->first_gp_reg_save != 32)
16530 {
52ff33d0
NF
16531 /* Determine whether we can address all of the registers that need
16532 to be saved with an offset from the stack pointer that fits in
16533 the small const field for SPE memory instructions. */
16534 int spe_regs_addressable_via_sp
16535 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16536 + (32 - info->first_gp_reg_save - 1) * reg_size);
16537 int spe_offset;
16538
16539 if (spe_regs_addressable_via_sp)
45b194f8 16540 spe_offset = info->spe_gp_save_offset + sp_offset;
52ff33d0
NF
16541 else
16542 {
45b194f8 16543 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 16544 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 16545 not clobbering it when we were saving registers in the prologue.
52ff33d0
NF
16546 There's no need to worry here because the static chain is passed
16547 anew to every function. */
45b194f8
AM
16548 if (frame_reg_rtx == sp_reg_rtx)
16549 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16550 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
52ff33d0 16551 GEN_INT (info->spe_gp_save_offset + sp_offset)));
45b194f8
AM
16552 /* Keep the invariant that frame_reg_rtx + sp_offset points
16553 at the top of the stack frame. */
16554 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
16555
16556 spe_offset = 0;
16557 }
16558
16559 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16560 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16561 {
16562 rtx offset, addr, mem;
16563
16564 /* We're doing all this to ensure that the immediate offset
16565 fits into the immediate field of 'evldd'. */
16566 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
16567
16568 offset = GEN_INT (spe_offset + reg_size * i);
45b194f8 16569 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
52ff33d0
NF
16570 mem = gen_rtx_MEM (V2SImode, addr);
16571
16572 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16573 mem);
16574 }
16575 }
9ebbca7d
GK
16576 else
16577 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0 16578 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
9ebbca7d 16579 {
f676971a
EC
16580 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16581 GEN_INT (info->gp_save_offset
16582 + sp_offset
9ebbca7d 16583 + reg_size * i));
0be76840 16584 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 16585
f676971a 16586 emit_move_insn (gen_rtx_REG (reg_mode,
a3170dc6 16587 info->first_gp_reg_save + i), mem);
9ebbca7d 16588 }
9878760c 16589
9ebbca7d
GK
16590 /* Restore fpr's if we need to do it without calling a function. */
16591 if (restoring_FPRs_inline)
16592 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16593 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
16594 && ! call_used_regs[info->first_fp_reg_save+i]))
16595 {
16596 rtx addr, mem;
16597 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
16598 GEN_INT (info->fp_save_offset
16599 + sp_offset
a4f6c312 16600 + 8 * i));
0be76840 16601 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16602
f676971a 16603 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
16604 info->first_fp_reg_save + i),
16605 mem);
16606 }
8d30c4ee 16607
9ebbca7d
GK
16608 /* If we saved cr, restore it here. Just those that were used. */
16609 if (info->cr_save_p)
979721f8 16610 {
9ebbca7d 16611 rtx r12_rtx = gen_rtx_REG (SImode, 12);
e35b9579 16612 int count = 0;
f676971a 16613
d296e02e 16614 if (using_mtcr_multiple)
979721f8 16615 {
9ebbca7d 16616 for (i = 0; i < 8; i++)
6fb5fa3c 16617 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
e35b9579 16618 count++;
37409796 16619 gcc_assert (count);
e35b9579
GK
16620 }
16621
d296e02e 16622 if (using_mtcr_multiple && count > 1)
e35b9579
GK
16623 {
16624 rtvec p;
16625 int ndx;
f676971a 16626
e35b9579 16627 p = rtvec_alloc (count);
9ebbca7d 16628
e35b9579 16629 ndx = 0;
9ebbca7d 16630 for (i = 0; i < 8; i++)
6fb5fa3c 16631 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
9ebbca7d
GK
16632 {
16633 rtvec r = rtvec_alloc (2);
16634 RTVEC_ELT (r, 0) = r12_rtx;
16635 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
e35b9579 16636 RTVEC_ELT (p, ndx) =
f676971a 16637 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
615158e2 16638 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
e35b9579 16639 ndx++;
9ebbca7d
GK
16640 }
16641 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
37409796 16642 gcc_assert (ndx == count);
979721f8
MM
16643 }
16644 else
9ebbca7d 16645 for (i = 0; i < 8; i++)
6fb5fa3c 16646 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
979721f8 16647 {
f676971a 16648 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9ebbca7d
GK
16649 CR0_REGNO+i),
16650 r12_rtx));
979721f8 16651 }
979721f8
MM
16652 }
16653
9ebbca7d 16654 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6
AM
16655 have been done. */
16656 if (frame_reg_rtx != sp_reg_rtx)
16657 {
16658 /* This blockage is needed so that sched doesn't decide to move
16659 the sp change before the register restores. */
16660 rs6000_emit_stack_tie ();
45b194f8
AM
16661 if (sp_offset != 0)
16662 emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
16663 GEN_INT (sp_offset)));
52ff33d0
NF
16664 else
16665 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
022123e6
AM
16666 }
16667 else if (sp_offset != 0)
16668 emit_insn (TARGET_32BIT
16669 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16670 GEN_INT (sp_offset))
16671 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16672 GEN_INT (sp_offset)));
b6c9286a 16673
83720594
RH
16674 if (current_function_calls_eh_return)
16675 {
16676 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 16677 emit_insn (TARGET_32BIT
83720594
RH
16678 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
16679 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
16680 }
16681
9ebbca7d
GK
16682 if (!sibcall)
16683 {
16684 rtvec p;
16685 if (! restoring_FPRs_inline)
16686 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
16687 else
16688 p = rtvec_alloc (2);
b6c9286a 16689
e35b9579 16690 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f676971a
EC
16691 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
16692 gen_rtx_REG (Pmode,
1de43f85 16693 LR_REGNO));
9ebbca7d
GK
16694
16695 /* If we have to restore more than two FP registers, branch to the
16696 restore function. It will return to our caller. */
16697 if (! restoring_FPRs_inline)
16698 {
16699 int i;
16700 char rname[30];
520a57c8 16701 const char *alloc_rname;
979721f8 16702
f676971a 16703 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9ebbca7d 16704 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
a8a05998 16705 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
16706 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
16707 gen_rtx_SYMBOL_REF (Pmode,
16708 alloc_rname));
b6c9286a 16709
9ebbca7d
GK
16710 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16711 {
16712 rtx addr, mem;
16713 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
16714 GEN_INT (info->fp_save_offset + 8*i));
0be76840 16715 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16716
f676971a 16717 RTVEC_ELT (p, i+3) =
9ebbca7d
GK
16718 gen_rtx_SET (VOIDmode,
16719 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
16720 mem);
b6c9286a
MM
16721 }
16722 }
f676971a 16723
9ebbca7d 16724 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 16725 }
9878760c
RK
16726}
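The AltiVec restore loop near the top of this epilogue can only use [reg+reg] addressing, so it materializes the constant part of each save-slot address in r0 before the load. A minimal standalone sketch of that arithmetic, with invented layout values (the real ones come from rs6000_stack_info):

/* Hypothetical sketch of the constants built for the AltiVec restore loop
   above; the save-area offset and frame size are made up.  */
#include <stdio.h>

int
main (void)
{
  long altivec_save_offset = -192;  /* assumed offset of the vector save area */
  long sp_offset = 288;             /* frame_reg + sp_offset == top of frame */

  /* One 16-byte slot per saved vector register, as in the loop above.  */
  for (int i = 0; i < 3; i++)
    printf ("slot %d: frame_reg + %ld\n",
            i, altivec_save_offset + sp_offset + 16L * i);
  return 0;
}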
16727
16728/* Write function epilogue. */
16729
08c148a8 16730static void
f676971a 16731rs6000_output_function_epilogue (FILE *file,
a2369ed3 16732 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 16733{
9ebbca7d 16734 if (! HAVE_epilogue)
9878760c 16735 {
9ebbca7d
GK
16736 rtx insn = get_last_insn ();
16737 /* If the last insn was a BARRIER, we don't have to write anything except
16738 the trace table. */
16739 if (GET_CODE (insn) == NOTE)
16740 insn = prev_nonnote_insn (insn);
16741 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 16742 {
9ebbca7d
GK
16743 /* This is slightly ugly, but at least we don't have two
16744 copies of the epilogue-emitting code. */
16745 start_sequence ();
16746
16747 /* A NOTE_INSN_DELETED is supposed to be at the start
16748 and end of the "toplevel" insn chain. */
2e040219 16749 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16750 rs6000_emit_epilogue (FALSE);
2e040219 16751 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16752
a3c9585f 16753 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16754 {
16755 rtx insn;
16756 unsigned addr = 0;
16757 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16758 {
16759 INSN_ADDRESSES_NEW (insn, addr);
16760 addr += 4;
16761 }
16762 }
16763
9ebbca7d 16764 if (TARGET_DEBUG_STACK)
a4f6c312 16765 debug_rtx_list (get_insns (), 100);
c9d691e9 16766 final (get_insns (), file, FALSE);
9ebbca7d 16767 end_sequence ();
4697a36c 16768 }
9878760c 16769 }
b4ac57ab 16770
efdba735
SH
16771#if TARGET_MACHO
16772 macho_branch_islands ();
0e5da0be
GK
16773 /* Mach-O doesn't support labels at the end of objects, so if
16774 it looks like we might want one, insert a NOP. */
16775 {
16776 rtx insn = get_last_insn ();
16777 while (insn
16778 && NOTE_P (insn)
a38e7aa5 16779 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 16780 insn = PREV_INSN (insn);
f676971a
EC
16781 if (insn
16782 && (LABEL_P (insn)
0e5da0be 16783 || (NOTE_P (insn)
a38e7aa5 16784 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
16785 fputs ("\tnop\n", file);
16786 }
16787#endif
16788
9b30bae2 16789 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
16790 on its format.
16791
16792 We don't output a traceback table if -finhibit-size-directive was
16793 used. The documentation for -finhibit-size-directive reads
16794 ``don't output a @code{.size} assembler directive, or anything
16795 else that would cause trouble if the function is split in the
16796 middle, and the two halves are placed at locations far apart in
16797 memory.'' The traceback table has this property, since it
16798 includes the offset from the start of the function to the
4d30c363
MM
16799 traceback table itself.
16800
16801 System V.4 Powerpc's (and the embedded ABI derived from it) use a
b6c9286a 16802 different traceback table. */
57ac7be9 16803 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
8097c268 16804 && rs6000_traceback != traceback_none && !current_function_is_thunk)
9b30bae2 16805 {
69c75916 16806 const char *fname = NULL;
3ac88239 16807 const char *language_string = lang_hooks.name;
6041bf2f 16808 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 16809 int i;
57ac7be9 16810 int optional_tbtab;
8097c268 16811 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
16812
16813 if (rs6000_traceback == traceback_full)
16814 optional_tbtab = 1;
16815 else if (rs6000_traceback == traceback_part)
16816 optional_tbtab = 0;
16817 else
16818 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 16819
69c75916
AM
16820 if (optional_tbtab)
16821 {
16822 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
16823 while (*fname == '.') /* V.4 encodes . in the name */
16824 fname++;
16825
16826 /* Need label immediately before tbtab, so we can compute
16827 its offset from the function start. */
16828 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
16829 ASM_OUTPUT_LABEL (file, fname);
16830 }
314fc5a9
ILT
16831
16832 /* The .tbtab pseudo-op can only be used for the first eight
16833 expressions, since it can't handle the possibly variable
16834 length fields that follow. However, if you omit the optional
16835 fields, the assembler outputs zeros for all optional fields
 16836 anyway, giving each variable length field its minimum length
 16837 (as defined in sys/debug.h). Thus we cannot use the .tbtab
16838 pseudo-op at all. */
16839
16840 /* An all-zero word flags the start of the tbtab, for debuggers
16841 that have to find it by searching forward from the entry
16842 point or from the current pc. */
19d2d16f 16843 fputs ("\t.long 0\n", file);
314fc5a9
ILT
16844
16845 /* Tbtab format type. Use format type 0. */
19d2d16f 16846 fputs ("\t.byte 0,", file);
314fc5a9 16847
5fc921c1
DE
16848 /* Language type. Unfortunately, there does not seem to be any
16849 official way to discover the language being compiled, so we
16850 use language_string.
16851 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
16852 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
16853 a number, so for now use 9. */
5fc921c1 16854 if (! strcmp (language_string, "GNU C"))
314fc5a9 16855 i = 0;
6de9cd9a
DN
16856 else if (! strcmp (language_string, "GNU F77")
16857 || ! strcmp (language_string, "GNU F95"))
314fc5a9 16858 i = 1;
8b83775b 16859 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 16860 i = 2;
5fc921c1
DE
16861 else if (! strcmp (language_string, "GNU Ada"))
16862 i = 3;
56438901
AM
16863 else if (! strcmp (language_string, "GNU C++")
16864 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 16865 i = 9;
9517ead8
AG
16866 else if (! strcmp (language_string, "GNU Java"))
16867 i = 13;
5fc921c1
DE
16868 else if (! strcmp (language_string, "GNU Objective-C"))
16869 i = 14;
314fc5a9 16870 else
37409796 16871 gcc_unreachable ();
314fc5a9
ILT
16872 fprintf (file, "%d,", i);
16873
16874 /* 8 single bit fields: global linkage (not set for C extern linkage,
16875 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
16876 from start of procedure stored in tbtab, internal function, function
16877 has controlled storage, function has no toc, function uses fp,
16878 function logs/aborts fp operations. */
16879 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
16880 fprintf (file, "%d,",
16881 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
16882
16883 /* 6 bitfields: function is interrupt handler, name present in
16884 proc table, function calls alloca, on condition directives
16885 (controls stack walks, 3 bits), saves condition reg, saves
16886 link reg. */
16887 /* The `function calls alloca' bit seems to be set whenever reg 31 is
16888 set up as a frame pointer, even when there is no alloca call. */
16889 fprintf (file, "%d,",
6041bf2f
DE
16890 ((optional_tbtab << 6)
16891 | ((optional_tbtab & frame_pointer_needed) << 5)
16892 | (info->cr_save_p << 1)
16893 | (info->lr_save_p)));
314fc5a9 16894
6041bf2f 16895 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
16896 (6 bits). */
16897 fprintf (file, "%d,",
4697a36c 16898 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
16899
16900 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
16901 fprintf (file, "%d,", (32 - first_reg_to_save ()));
16902
6041bf2f
DE
16903 if (optional_tbtab)
16904 {
16905 /* Compute the parameter info from the function decl argument
16906 list. */
16907 tree decl;
16908 int next_parm_info_bit = 31;
314fc5a9 16909
6041bf2f
DE
16910 for (decl = DECL_ARGUMENTS (current_function_decl);
16911 decl; decl = TREE_CHAIN (decl))
16912 {
16913 rtx parameter = DECL_INCOMING_RTL (decl);
16914 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 16915
6041bf2f
DE
16916 if (GET_CODE (parameter) == REG)
16917 {
ebb109ad 16918 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
16919 {
16920 int bits;
16921
16922 float_parms++;
16923
37409796
NS
16924 switch (mode)
16925 {
16926 case SFmode:
e41b2a33 16927 case SDmode:
37409796
NS
16928 bits = 0x2;
16929 break;
16930
16931 case DFmode:
7393f7f8 16932 case DDmode:
37409796 16933 case TFmode:
7393f7f8 16934 case TDmode:
37409796
NS
16935 bits = 0x3;
16936 break;
16937
16938 default:
16939 gcc_unreachable ();
16940 }
6041bf2f
DE
16941
16942 /* If only one bit will fit, don't or in this entry. */
16943 if (next_parm_info_bit > 0)
16944 parm_info |= (bits << (next_parm_info_bit - 1));
16945 next_parm_info_bit -= 2;
16946 }
16947 else
16948 {
16949 fixed_parms += ((GET_MODE_SIZE (mode)
16950 + (UNITS_PER_WORD - 1))
16951 / UNITS_PER_WORD);
16952 next_parm_info_bit -= 1;
16953 }
16954 }
16955 }
16956 }
314fc5a9
ILT
16957
16958 /* Number of fixed point parameters. */
16959 /* This is actually the number of words of fixed point parameters; thus
16960 an 8 byte struct counts as 2; and thus the maximum value is 8. */
16961 fprintf (file, "%d,", fixed_parms);
16962
16963 /* 2 bitfields: number of floating point parameters (7 bits), parameters
16964 all on stack. */
16965 /* This is actually the number of fp registers that hold parameters;
16966 and thus the maximum value is 13. */
16967 /* Set parameters on stack bit if parameters are not in their original
16968 registers, regardless of whether they are on the stack? Xlc
16969 seems to set the bit when not optimizing. */
16970 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
16971
6041bf2f
DE
16972 if (! optional_tbtab)
16973 return;
16974
314fc5a9
ILT
16975 /* Optional fields follow. Some are variable length. */
16976
16977 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
16978 11 double float. */
16979 /* There is an entry for each parameter in a register, in the order that
16980 they occur in the parameter list. Any intervening arguments on the
16981 stack are ignored. If the list overflows a long (max possible length
16982 34 bits) then completely leave off all elements that don't fit. */
16983 /* Only emit this long if there was at least one parameter. */
16984 if (fixed_parms || float_parms)
16985 fprintf (file, "\t.long %d\n", parm_info);
16986
16987 /* Offset from start of code to tb table. */
19d2d16f 16988 fputs ("\t.long ", file);
314fc5a9 16989 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
16990 if (TARGET_AIX)
16991 RS6000_OUTPUT_BASENAME (file, fname);
16992 else
16993 assemble_name (file, fname);
16994 putc ('-', file);
16995 rs6000_output_function_entry (file, fname);
19d2d16f 16996 putc ('\n', file);
314fc5a9
ILT
16997
16998 /* Interrupt handler mask. */
16999 /* Omit this long, since we never set the interrupt handler bit
17000 above. */
17001
17002 /* Number of CTL (controlled storage) anchors. */
17003 /* Omit this long, since the has_ctl bit is never set above. */
17004
17005 /* Displacement into stack of each CTL anchor. */
17006 /* Omit this list of longs, because there are no CTL anchors. */
17007
17008 /* Length of function name. */
69c75916
AM
17009 if (*fname == '*')
17010 ++fname;
296b8152 17011 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
17012
17013 /* Function name. */
17014 assemble_string (fname, strlen (fname));
17015
17016 /* Register for alloca automatic storage; this is always reg 31.
17017 Only emit this if the alloca bit was set above. */
17018 if (frame_pointer_needed)
19d2d16f 17019 fputs ("\t.byte 31\n", file);
b1765bde
DE
17020
17021 fputs ("\t.align 2\n", file);
9b30bae2 17022 }
9878760c 17023}
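As a worked example of the parm_info packing done above (bits are assigned from bit 31 downward, one implicit 0 bit for each fixed-point register argument, and two bits per floating-point register argument, 0x2 for single and 0x3 for double or wider), here is a small self-contained sketch for a hypothetical argument list (int, double, float); it uses the same bit arithmetic but is not GCC code.

/* Sketch of the traceback parm_info encoding for a made-up signature
   (int, double, float).  Expected result: 0x70000000.  */
#include <stdio.h>

int
main (void)
{
  int parm_info = 0;
  int next_parm_info_bit = 31;

  /* int: a fixed-point argument leaves a 0 bit and uses one position.  */
  next_parm_info_bit -= 1;

  /* double: two bits, value 0x3.  */
  parm_info |= 0x3 << (next_parm_info_bit - 1);
  next_parm_info_bit -= 2;

  /* float: two bits, value 0x2.  */
  parm_info |= 0x2 << (next_parm_info_bit - 1);
  next_parm_info_bit -= 2;

  printf ("parm_info = 0x%08x\n", parm_info);
  return 0;
}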
17167fd8 17024\f
a4f6c312
SS
17025/* A C compound statement that outputs the assembler code for a thunk
17026 function, used to implement C++ virtual function calls with
17027 multiple inheritance. The thunk acts as a wrapper around a virtual
17028 function, adjusting the implicit object parameter before handing
17029 control off to the real function.
17030
17031 First, emit code to add the integer DELTA to the location that
17032 contains the incoming first argument. Assume that this argument
17033 contains a pointer, and is the one used to pass the `this' pointer
17034 in C++. This is the incoming argument *before* the function
17035 prologue, e.g. `%o0' on a sparc. The addition must preserve the
17036 values of all other incoming arguments.
17167fd8
MM
17037
17038 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
17039 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
17040 not touch the return address. Hence returning from FUNCTION will
17041 return to whoever called the current `thunk'.
17167fd8 17042
a4f6c312
SS
17043 The effect must be as if FUNCTION had been called directly with the
17044 adjusted first argument. This macro is responsible for emitting
17045 all of the code for a thunk function; output_function_prologue()
17046 and output_function_epilogue() are not invoked.
17167fd8 17047
a4f6c312
SS
17048 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
17049 been extracted from it.) It might possibly be useful on some
17050 targets, but probably not.
17167fd8 17051
a4f6c312
SS
17052 If you do not define this macro, the target-independent code in the
17053 C++ frontend will generate a less efficient heavyweight thunk that
17054 calls FUNCTION instead of jumping to it. The generic approach does
17055 not support varargs. */
17167fd8 17056
3961e8fe 17057static void
f676971a
EC
17058rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
17059 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 17060 tree function)
17167fd8 17061{
5b71a4e7 17062 rtx this, insn, funexp;
17167fd8 17063
5b71a4e7 17064 reload_completed = 1;
fe3ad572 17065 epilogue_completed = 1;
56a7189a 17066
5b71a4e7 17067 /* Mark the end of the (empty) prologue. */
2e040219 17068 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 17069
5b71a4e7
DE
17070 /* Find the "this" pointer. If the function returns a structure,
17071 the structure return pointer is in r3. */
61f71b34 17072 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 17073 this = gen_rtx_REG (Pmode, 4);
56a7189a 17074 else
5b71a4e7 17075 this = gen_rtx_REG (Pmode, 3);
17167fd8 17076
5b71a4e7
DE
17077 /* Apply the constant offset, if required. */
17078 if (delta)
17079 {
17080 rtx delta_rtx = GEN_INT (delta);
17081 emit_insn (TARGET_32BIT
17082 ? gen_addsi3 (this, this, delta_rtx)
17083 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
17084 }
17085
5b71a4e7
DE
17086 /* Apply the offset from the vtable, if required. */
17087 if (vcall_offset)
17167fd8 17088 {
5b71a4e7
DE
17089 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
17090 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 17091
5b71a4e7 17092 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
17093 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
17094 {
17095 emit_insn (TARGET_32BIT
17096 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
17097 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
17098 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
17099 }
17100 else
17101 {
17102 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
17103
17104 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
17105 }
5b71a4e7
DE
17106 emit_insn (TARGET_32BIT
17107 ? gen_addsi3 (this, this, tmp)
17108 : gen_adddi3 (this, this, tmp));
17167fd8
MM
17109 }
17110
5b71a4e7
DE
17111 /* Generate a tail call to the target function. */
17112 if (!TREE_USED (function))
17113 {
17114 assemble_external (function);
17115 TREE_USED (function) = 1;
17116 }
17117 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 17118 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
17119
17120#if TARGET_MACHO
ab82a49f 17121 if (MACHOPIC_INDIRECT)
5b71a4e7 17122 funexp = machopic_indirect_call_target (funexp);
ee890fe2 17123#endif
5b71a4e7
DE
17124
17125 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 17126 generate sibcall RTL explicitly. */
5b71a4e7
DE
17127 insn = emit_call_insn (
17128 gen_rtx_PARALLEL (VOIDmode,
17129 gen_rtvec (4,
17130 gen_rtx_CALL (VOIDmode,
17131 funexp, const0_rtx),
17132 gen_rtx_USE (VOIDmode, const0_rtx),
17133 gen_rtx_USE (VOIDmode,
17134 gen_rtx_REG (SImode,
1de43f85 17135 LR_REGNO)),
5b71a4e7
DE
17136 gen_rtx_RETURN (VOIDmode))));
17137 SIBLING_CALL_P (insn) = 1;
17138 emit_barrier ();
17139
17140 /* Run just enough of rest_of_compilation to get the insns emitted.
17141 There's not really enough bulk here to make other passes such as
17142 instruction scheduling worth while. Note that use_thunk calls
17143 assemble_start_function and assemble_end_function. */
17144 insn = get_insns ();
55e092c4 17145 insn_locators_alloc ();
5b71a4e7
DE
17146 shorten_branches (insn);
17147 final_start_function (insn, file, 1);
c9d691e9 17148 final (insn, file, 1);
5b71a4e7
DE
17149 final_end_function ();
17150
17151 reload_completed = 0;
fe3ad572 17152 epilogue_completed = 0;
9ebbca7d 17153}
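The vcall_offset handling above chooses between a single load with a 16-bit displacement and an explicit add by testing whether the offset fits in a signed 16-bit field. A tiny standalone sketch of that range test follows; fits_in_16bit is a made-up name, but the constants mirror the check above.

/* Sketch of the signed-16-bit fit test used for vcall_offset above.  */
#include <stdio.h>

static int
fits_in_16bit (long long offset)
{
  /* Offsets in [-0x8000, 0x7fff] fit in the displacement field.  */
  return (unsigned long long) offset + 0x8000 < 0x10000;
}

int
main (void)
{
  long long samples[] = { 0, 0x7fff, -0x8000, 0x8000, -0x8001 };
  for (int i = 0; i < 5; i++)
    printf ("%lld fits: %d\n", samples[i], fits_in_16bit (samples[i]));
  return 0;
}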
9ebbca7d
GK
17154\f
17155/* A quick summary of the various types of 'constant-pool tables'
17156 under PowerPC:
17157
f676971a 17158 Target Flags Name One table per
9ebbca7d
GK
17159 AIX (none) AIX TOC object file
17160 AIX -mfull-toc AIX TOC object file
17161 AIX -mminimal-toc AIX minimal TOC translation unit
17162 SVR4/EABI (none) SVR4 SDATA object file
17163 SVR4/EABI -fpic SVR4 pic object file
17164 SVR4/EABI -fPIC SVR4 PIC translation unit
17165 SVR4/EABI -mrelocatable EABI TOC function
17166 SVR4/EABI -maix AIX TOC object file
f676971a 17167 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
17168 AIX minimal TOC translation unit
17169
17170 Name Reg. Set by entries contains:
17171 made by addrs? fp? sum?
17172
17173 AIX TOC 2 crt0 as Y option option
17174 AIX minimal TOC 30 prolog gcc Y Y option
17175 SVR4 SDATA 13 crt0 gcc N Y N
17176 SVR4 pic 30 prolog ld Y not yet N
17177 SVR4 PIC 30 prolog gcc Y option option
17178 EABI TOC 30 prolog gcc Y option option
17179
17180*/
17181
9ebbca7d
GK
17182/* Hash functions for the hash table. */
17183
17184static unsigned
a2369ed3 17185rs6000_hash_constant (rtx k)
9ebbca7d 17186{
46b33600
RH
17187 enum rtx_code code = GET_CODE (k);
17188 enum machine_mode mode = GET_MODE (k);
17189 unsigned result = (code << 3) ^ mode;
17190 const char *format;
17191 int flen, fidx;
f676971a 17192
46b33600
RH
17193 format = GET_RTX_FORMAT (code);
17194 flen = strlen (format);
17195 fidx = 0;
9ebbca7d 17196
46b33600
RH
17197 switch (code)
17198 {
17199 case LABEL_REF:
17200 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
17201
17202 case CONST_DOUBLE:
17203 if (mode != VOIDmode)
17204 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
17205 flen = 2;
17206 break;
17207
17208 case CODE_LABEL:
17209 fidx = 3;
17210 break;
17211
17212 default:
17213 break;
17214 }
9ebbca7d
GK
17215
17216 for (; fidx < flen; fidx++)
17217 switch (format[fidx])
17218 {
17219 case 's':
17220 {
17221 unsigned i, len;
17222 const char *str = XSTR (k, fidx);
17223 len = strlen (str);
17224 result = result * 613 + len;
17225 for (i = 0; i < len; i++)
17226 result = result * 613 + (unsigned) str[i];
17167fd8
MM
17227 break;
17228 }
9ebbca7d
GK
17229 case 'u':
17230 case 'e':
17231 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
17232 break;
17233 case 'i':
17234 case 'n':
17235 result = result * 613 + (unsigned) XINT (k, fidx);
17236 break;
17237 case 'w':
17238 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
17239 result = result * 613 + (unsigned) XWINT (k, fidx);
17240 else
17241 {
17242 size_t i;
9390387d 17243 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
17244 result = result * 613 + (unsigned) (XWINT (k, fidx)
17245 >> CHAR_BIT * i);
17246 }
17247 break;
09501938
DE
17248 case '0':
17249 break;
9ebbca7d 17250 default:
37409796 17251 gcc_unreachable ();
9ebbca7d 17252 }
46b33600 17253
9ebbca7d
GK
17254 return result;
17255}
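For reference, the 's' (string) case above folds the length and then each character into the running hash with the multiplier 613. A minimal standalone sketch of just that mixing step; mix_string is a made-up helper, not part of GCC.

/* Sketch of the string-field mixing used by rs6000_hash_constant above.  */
#include <stdio.h>
#include <string.h>

static unsigned
mix_string (unsigned result, const char *str)
{
  unsigned len = strlen (str);
  result = result * 613 + len;
  for (unsigned i = 0; i < len; i++)
    result = result * 613 + (unsigned) str[i];
  return result;
}

int
main (void)
{
  printf ("%u\n", mix_string (1u, "example_symbol"));
  return 0;
}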
17256
17257static unsigned
a2369ed3 17258toc_hash_function (const void *hash_entry)
9ebbca7d 17259{
f676971a 17260 const struct toc_hash_struct *thc =
a9098fd0
GK
17261 (const struct toc_hash_struct *) hash_entry;
17262 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
17263}
17264
17265/* Compare H1 and H2 for equivalence. */
17266
17267static int
a2369ed3 17268toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
17269{
17270 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
17271 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
17272
a9098fd0
GK
17273 if (((const struct toc_hash_struct *) h1)->key_mode
17274 != ((const struct toc_hash_struct *) h2)->key_mode)
17275 return 0;
17276
5692c7bc 17277 return rtx_equal_p (r1, r2);
9ebbca7d
GK
17278}
17279
28e510bd
MM
17280/* These are the names given by the C++ front-end to vtables, and
17281 vtable-like objects. Ideally, this logic should not be here;
17282 instead, there should be some programmatic way of inquiring as
17283 to whether or not an object is a vtable. */
17284
17285#define VTABLE_NAME_P(NAME) \
9390387d 17286 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
17287 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
17288 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 17289 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 17290 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
28e510bd
MM
17291
17292void
a2369ed3 17293rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
17294{
17295 /* Currently C++ toc references to vtables can be emitted before it
17296 is decided whether the vtable is public or private. If this is
17297 the case, then the linker will eventually complain that there is
f676971a 17298 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
17299 we emit the TOC reference to reference the symbol and not the
17300 section. */
17301 const char *name = XSTR (x, 0);
54ee9799 17302
f676971a 17303 if (VTABLE_NAME_P (name))
54ee9799
DE
17304 {
17305 RS6000_OUTPUT_BASENAME (file, name);
17306 }
17307 else
17308 assemble_name (file, name);
28e510bd
MM
17309}
17310
a4f6c312
SS
17311/* Output a TOC entry. We derive the entry name from what is being
17312 written. */
9878760c
RK
17313
17314void
a2369ed3 17315output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
17316{
17317 char buf[256];
3cce094d 17318 const char *name = buf;
ec940faa 17319 const char *real_name;
9878760c 17320 rtx base = x;
16fdeb48 17321 HOST_WIDE_INT offset = 0;
9878760c 17322
37409796 17323 gcc_assert (!TARGET_NO_TOC);
4697a36c 17324
9ebbca7d
GK
17325 /* When the linker won't eliminate them, don't output duplicate
17326 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
17327 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
17328 CODE_LABELs. */
17329 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
17330 {
17331 struct toc_hash_struct *h;
17332 void * * found;
f676971a 17333
17211ab5 17334 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 17335 time because GGC is not initialized at that point. */
17211ab5 17336 if (toc_hash_table == NULL)
f676971a 17337 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
17338 toc_hash_eq, NULL);
17339
9ebbca7d
GK
17340 h = ggc_alloc (sizeof (*h));
17341 h->key = x;
a9098fd0 17342 h->key_mode = mode;
9ebbca7d 17343 h->labelno = labelno;
f676971a 17344
9ebbca7d
GK
17345 found = htab_find_slot (toc_hash_table, h, 1);
17346 if (*found == NULL)
17347 *found = h;
f676971a 17348 else /* This is indeed a duplicate.
9ebbca7d
GK
17349 Set this label equal to that label. */
17350 {
17351 fputs ("\t.set ", file);
17352 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
17353 fprintf (file, "%d,", labelno);
17354 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 17355 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
17356 found)->labelno));
17357 return;
17358 }
17359 }
17360
17361 /* If we're going to put a double constant in the TOC, make sure it's
17362 aligned properly when strict alignment is on. */
ff1720ed
RK
17363 if (GET_CODE (x) == CONST_DOUBLE
17364 && STRICT_ALIGNMENT
a9098fd0 17365 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
17366 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
17367 ASM_OUTPUT_ALIGN (file, 3);
17368 }
17369
4977bab6 17370 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 17371
37c37a57
RK
17372 /* Handle FP constants specially. Note that if we have a minimal
17373 TOC, things we put here aren't actually in the TOC, so we can allow
17374 FP constants. */
00b79d54
BE
17375 if (GET_CODE (x) == CONST_DOUBLE &&
17376 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
17377 {
17378 REAL_VALUE_TYPE rv;
17379 long k[4];
17380
17381 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17382 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17383 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
17384 else
17385 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
17386
17387 if (TARGET_64BIT)
17388 {
17389 if (TARGET_MINIMAL_TOC)
17390 fputs (DOUBLE_INT_ASM_OP, file);
17391 else
17392 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17393 k[0] & 0xffffffff, k[1] & 0xffffffff,
17394 k[2] & 0xffffffff, k[3] & 0xffffffff);
17395 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
17396 k[0] & 0xffffffff, k[1] & 0xffffffff,
17397 k[2] & 0xffffffff, k[3] & 0xffffffff);
17398 return;
17399 }
17400 else
17401 {
17402 if (TARGET_MINIMAL_TOC)
17403 fputs ("\t.long ", file);
17404 else
17405 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17406 k[0] & 0xffffffff, k[1] & 0xffffffff,
17407 k[2] & 0xffffffff, k[3] & 0xffffffff);
17408 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
17409 k[0] & 0xffffffff, k[1] & 0xffffffff,
17410 k[2] & 0xffffffff, k[3] & 0xffffffff);
17411 return;
17412 }
17413 }
00b79d54
BE
17414 else if (GET_CODE (x) == CONST_DOUBLE &&
17415 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 17416 {
042259f2
DE
17417 REAL_VALUE_TYPE rv;
17418 long k[2];
0adc764e 17419
042259f2 17420 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17421
17422 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17423 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
17424 else
17425 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 17426
13ded975
DE
17427 if (TARGET_64BIT)
17428 {
17429 if (TARGET_MINIMAL_TOC)
2bfcf297 17430 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17431 else
2f0552b6
AM
17432 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17433 k[0] & 0xffffffff, k[1] & 0xffffffff);
17434 fprintf (file, "0x%lx%08lx\n",
17435 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17436 return;
17437 }
1875cc88 17438 else
13ded975
DE
17439 {
17440 if (TARGET_MINIMAL_TOC)
2bfcf297 17441 fputs ("\t.long ", file);
13ded975 17442 else
2f0552b6
AM
17443 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17444 k[0] & 0xffffffff, k[1] & 0xffffffff);
17445 fprintf (file, "0x%lx,0x%lx\n",
17446 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17447 return;
17448 }
9878760c 17449 }
00b79d54
BE
17450 else if (GET_CODE (x) == CONST_DOUBLE &&
17451 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 17452 {
042259f2
DE
17453 REAL_VALUE_TYPE rv;
17454 long l;
9878760c 17455
042259f2 17456 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17457 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17458 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
17459 else
17460 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 17461
31bfaa0b
DE
17462 if (TARGET_64BIT)
17463 {
17464 if (TARGET_MINIMAL_TOC)
2bfcf297 17465 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 17466 else
2f0552b6
AM
17467 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17468 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
17469 return;
17470 }
042259f2 17471 else
31bfaa0b
DE
17472 {
17473 if (TARGET_MINIMAL_TOC)
2bfcf297 17474 fputs ("\t.long ", file);
31bfaa0b 17475 else
2f0552b6
AM
17476 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17477 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
17478 return;
17479 }
042259f2 17480 }
f176e826 17481 else if (GET_MODE (x) == VOIDmode
a9098fd0 17482 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 17483 {
e2c953b6 17484 unsigned HOST_WIDE_INT low;
042259f2
DE
17485 HOST_WIDE_INT high;
17486
17487 if (GET_CODE (x) == CONST_DOUBLE)
17488 {
17489 low = CONST_DOUBLE_LOW (x);
17490 high = CONST_DOUBLE_HIGH (x);
17491 }
17492 else
17493#if HOST_BITS_PER_WIDE_INT == 32
17494 {
17495 low = INTVAL (x);
0858c623 17496 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
17497 }
17498#else
17499 {
c4ad648e
AM
17500 low = INTVAL (x) & 0xffffffff;
17501 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
17502 }
17503#endif
9878760c 17504
a9098fd0
GK
 17505 /* TOC entries are always Pmode-sized, but since this
 17506 is a big-endian machine, if we're putting smaller
 17507 integer constants in the TOC we have to pad them.
17508 (This is still a win over putting the constants in
17509 a separate constant pool, because then we'd have
02a4ec28
FS
17510 to have both a TOC entry _and_ the actual constant.)
17511
17512 For a 32-bit target, CONST_INT values are loaded and shifted
17513 entirely within `low' and can be stored in one TOC entry. */
17514
37409796
NS
17515 /* It would be easy to make this work, but it doesn't now. */
17516 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
17517
17518 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
17519 {
17520#if HOST_BITS_PER_WIDE_INT == 32
17521 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
17522 POINTER_SIZE, &low, &high, 0);
17523#else
17524 low |= high << 32;
17525 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
17526 high = (HOST_WIDE_INT) low >> 32;
17527 low &= 0xffffffff;
17528#endif
17529 }
a9098fd0 17530
13ded975
DE
17531 if (TARGET_64BIT)
17532 {
17533 if (TARGET_MINIMAL_TOC)
2bfcf297 17534 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17535 else
2f0552b6
AM
17536 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
17537 (long) high & 0xffffffff, (long) low & 0xffffffff);
17538 fprintf (file, "0x%lx%08lx\n",
17539 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
17540 return;
17541 }
1875cc88 17542 else
13ded975 17543 {
02a4ec28
FS
17544 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
17545 {
17546 if (TARGET_MINIMAL_TOC)
2bfcf297 17547 fputs ("\t.long ", file);
02a4ec28 17548 else
2bfcf297 17549 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
17550 (long) high & 0xffffffff, (long) low & 0xffffffff);
17551 fprintf (file, "0x%lx,0x%lx\n",
17552 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 17553 }
13ded975 17554 else
02a4ec28
FS
17555 {
17556 if (TARGET_MINIMAL_TOC)
2bfcf297 17557 fputs ("\t.long ", file);
02a4ec28 17558 else
2f0552b6
AM
17559 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
17560 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 17561 }
13ded975
DE
17562 return;
17563 }
9878760c
RK
17564 }
17565
17566 if (GET_CODE (x) == CONST)
17567 {
37409796 17568 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 17569
9878760c
RK
17570 base = XEXP (XEXP (x, 0), 0);
17571 offset = INTVAL (XEXP (XEXP (x, 0), 1));
17572 }
f676971a 17573
37409796
NS
17574 switch (GET_CODE (base))
17575 {
17576 case SYMBOL_REF:
17577 name = XSTR (base, 0);
17578 break;
17579
17580 case LABEL_REF:
17581 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
17582 CODE_LABEL_NUMBER (XEXP (base, 0)));
17583 break;
17584
17585 case CODE_LABEL:
17586 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
17587 break;
17588
17589 default:
17590 gcc_unreachable ();
17591 }
9878760c 17592
772c5265 17593 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 17594 if (TARGET_MINIMAL_TOC)
2bfcf297 17595 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
17596 else
17597 {
b6c9286a 17598 fprintf (file, "\t.tc %s", real_name);
9878760c 17599
1875cc88 17600 if (offset < 0)
16fdeb48 17601 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 17602 else if (offset)
16fdeb48 17603 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 17604
19d2d16f 17605 fputs ("[TC],", file);
1875cc88 17606 }
581bc4de
MM
17607
17608 /* Currently C++ toc references to vtables can be emitted before it
17609 is decided whether the vtable is public or private. If this is
17610 the case, then the linker will eventually complain that there is
17611 a TOC reference to an unknown section. Thus, for vtables only,
17612 we emit the TOC reference to reference the symbol and not the
17613 section. */
28e510bd 17614 if (VTABLE_NAME_P (name))
581bc4de 17615 {
54ee9799 17616 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 17617 if (offset < 0)
16fdeb48 17618 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 17619 else if (offset > 0)
16fdeb48 17620 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
17621 }
17622 else
17623 output_addr_const (file, x);
19d2d16f 17624 putc ('\n', file);
9878760c
RK
17625}
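When a constant narrower than a pointer is placed in the TOC, the code above shifts it into the most-significant end of a Pmode-sized entry. A small standalone sketch of the 64-bit-host branch of that padding, using a made-up 16-bit constant and plain 64-bit arithmetic instead of lshift_double.

/* Sketch of padding a narrow constant to a 64-bit TOC entry; the value
   0x1234 and the mode width are invented for illustration.  */
#include <stdio.h>

int
main (void)
{
  unsigned long long low = 0x1234;   /* hypothetical 16-bit constant */
  long long high = 0;
  int pointer_size = 64, mode_bits = 16;

  low |= (unsigned long long) high << 32;
  low <<= pointer_size - mode_bits;
  high = (long long) (low >> 32);
  low &= 0xffffffff;

  /* Prints the 64-bit entry the same way the TARGET_64BIT case above does.  */
  printf ("0x%llx%08llx\n",
          (unsigned long long) high & 0xffffffff, low & 0xffffffff);
  return 0;
}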
17626\f
17627/* Output an assembler pseudo-op to write an ASCII string of N characters
17628 starting at P to FILE.
17629
17630 On the RS/6000, we have to do this using the .byte operation and
17631 write out special characters outside the quoted string.
17632 Also, the assembler is broken; very long strings are truncated,
a4f6c312 17633 so we must artificially break them up early. */
9878760c
RK
17634
17635void
a2369ed3 17636output_ascii (FILE *file, const char *p, int n)
9878760c
RK
17637{
17638 char c;
17639 int i, count_string;
d330fd93
KG
17640 const char *for_string = "\t.byte \"";
17641 const char *for_decimal = "\t.byte ";
17642 const char *to_close = NULL;
9878760c
RK
17643
17644 count_string = 0;
17645 for (i = 0; i < n; i++)
17646 {
17647 c = *p++;
17648 if (c >= ' ' && c < 0177)
17649 {
17650 if (for_string)
17651 fputs (for_string, file);
17652 putc (c, file);
17653
17654 /* Write two quotes to get one. */
17655 if (c == '"')
17656 {
17657 putc (c, file);
17658 ++count_string;
17659 }
17660
17661 for_string = NULL;
17662 for_decimal = "\"\n\t.byte ";
17663 to_close = "\"\n";
17664 ++count_string;
17665
17666 if (count_string >= 512)
17667 {
17668 fputs (to_close, file);
17669
17670 for_string = "\t.byte \"";
17671 for_decimal = "\t.byte ";
17672 to_close = NULL;
17673 count_string = 0;
17674 }
17675 }
17676 else
17677 {
17678 if (for_decimal)
17679 fputs (for_decimal, file);
17680 fprintf (file, "%d", c);
17681
17682 for_string = "\n\t.byte \"";
17683 for_decimal = ", ";
17684 to_close = "\n";
17685 count_string = 0;
17686 }
17687 }
17688
17689 /* Now close the string if we have written one. Then end the line. */
17690 if (to_close)
9ebbca7d 17691 fputs (to_close, file);
9878760c
RK
17692}
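To illustrate the quoting scheme above, here is a simplified, self-contained sketch that applies the same split between printable runs (kept inside a quoted .byte string, with quote characters doubled) and other bytes (emitted as decimal .byte operands); it omits the 512-character line splitting.

/* Simplified sketch of output_ascii's printable/decimal splitting.  */
#include <stdio.h>

static void
ascii_sketch (FILE *file, const char *p, int n)
{
  const char *for_string = "\t.byte \"";
  const char *for_decimal = "\t.byte ";
  const char *to_close = NULL;

  for (int i = 0; i < n; i++)
    {
      char c = p[i];
      if (c >= ' ' && c < 0177)
        {
          if (for_string)
            fputs (for_string, file);
          putc (c, file);
          if (c == '"')
            putc (c, file);             /* write two quotes to get one */
          for_string = NULL;
          for_decimal = "\"\n\t.byte ";
          to_close = "\"\n";
        }
      else
        {
          if (for_decimal)
            fputs (for_decimal, file);
          fprintf (file, "%d", c);
          for_string = "\n\t.byte \"";
          for_decimal = ", ";
          to_close = "\n";
        }
    }
  if (to_close)
    fputs (to_close, file);
}

int
main (void)
{
  ascii_sketch (stdout, "hi\n\"x\"", 6);
  return 0;
}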
17693\f
17694/* Generate a unique section name for FILENAME for a section type
17695 represented by SECTION_DESC. Output goes into BUF.
17696
17697 SECTION_DESC can be any string, as long as it is different for each
17698 possible section type.
17699
17700 We name the section in the same manner as xlc. The name begins with an
17701 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
17702 names) with the last period replaced by the string SECTION_DESC. If
17703 FILENAME does not contain a period, SECTION_DESC is appended to the end of
17704 the name. */
9878760c
RK
17705
17706void
f676971a 17707rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 17708 const char *section_desc)
9878760c 17709{
9ebbca7d 17710 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
17711 char *p;
17712 int len;
9878760c
RK
17713
17714 after_last_slash = filename;
17715 for (q = filename; *q; q++)
11e5fe42
RK
17716 {
17717 if (*q == '/')
17718 after_last_slash = q + 1;
17719 else if (*q == '.')
17720 last_period = q;
17721 }
9878760c 17722
11e5fe42 17723 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 17724 *buf = (char *) xmalloc (len);
9878760c
RK
17725
17726 p = *buf;
17727 *p++ = '_';
17728
17729 for (q = after_last_slash; *q; q++)
17730 {
11e5fe42 17731 if (q == last_period)
c4ad648e 17732 {
9878760c
RK
17733 strcpy (p, section_desc);
17734 p += strlen (section_desc);
e3981aab 17735 break;
c4ad648e 17736 }
9878760c 17737
e9a780ec 17738 else if (ISALNUM (*q))
c4ad648e 17739 *p++ = *q;
9878760c
RK
17740 }
17741
11e5fe42 17742 if (last_period == 0)
9878760c
RK
17743 strcpy (p, section_desc);
17744 else
17745 *p = '\0';
17746}
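A quick standalone illustration of the naming rule described above: for a hypothetical input of filename "dir/foo.c" and descriptor "_bss_", the generated name is "_foo_bss_". section_name_sketch below is a made-up re-implementation for demonstration, not the real function.

/* Sketch of the xlc-style section naming implemented above.  */
#include <ctype.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static char *
section_name_sketch (const char *filename, const char *desc)
{
  const char *after_slash = filename, *last_period = NULL;
  const char *q;
  char *buf, *p;

  for (q = filename; *q; q++)
    {
      if (*q == '/')
        after_slash = q + 1;
      else if (*q == '.')
        last_period = q;
    }

  buf = malloc (strlen (after_slash) + strlen (desc) + 2);
  p = buf;
  *p++ = '_';

  for (q = after_slash; *q; q++)
    {
      if (q == last_period)
        {
          strcpy (p, desc);             /* replace the last period with DESC */
          p += strlen (desc);
          break;
        }
      else if (isalnum ((unsigned char) *q))
        *p++ = *q;
    }

  if (last_period == NULL)
    strcpy (p, desc);                   /* no period: append DESC at the end */
  else
    *p = '\0';
  return buf;
}

int
main (void)
{
  char *name = section_name_sketch ("dir/foo.c", "_bss_");
  printf ("%s\n", name);                /* prints _foo_bss_ */
  free (name);
  return 0;
}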
e165f3f0 17747\f
a4f6c312 17748/* Emit profile function. */
411707f4 17749
411707f4 17750void
a2369ed3 17751output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 17752{
858081ad
AH
17753 /* Non-standard profiling for kernels, which just saves LR then calls
17754 _mcount without worrying about arg saves. The idea is to change
17755 the function prologue as little as possible as it isn't easy to
17756 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
17757 if (TARGET_PROFILE_KERNEL)
17758 return;
17759
8480e480
CC
17760 if (DEFAULT_ABI == ABI_AIX)
17761 {
9739c90c
JJ
17762#ifndef NO_PROFILE_COUNTERS
17763# define NO_PROFILE_COUNTERS 0
17764#endif
f676971a 17765 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
17766 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
17767 else
17768 {
17769 char buf[30];
17770 const char *label_name;
17771 rtx fun;
411707f4 17772
9739c90c
JJ
17773 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
17774 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
17775 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 17776
9739c90c
JJ
17777 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
17778 fun, Pmode);
17779 }
8480e480 17780 }
ee890fe2
SS
17781 else if (DEFAULT_ABI == ABI_DARWIN)
17782 {
d5fa86ba 17783 const char *mcount_name = RS6000_MCOUNT;
1de43f85 17784 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
17785
17786 /* Be conservative and always set this, at least for now. */
17787 current_function_uses_pic_offset_table = 1;
17788
17789#if TARGET_MACHO
17790 /* For PIC code, set up a stub and collect the caller's address
17791 from r0, which is where the prologue puts it. */
11abc112
MM
17792 if (MACHOPIC_INDIRECT
17793 && current_function_uses_pic_offset_table)
17794 caller_addr_regno = 0;
ee890fe2
SS
17795#endif
17796 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
17797 0, VOIDmode, 1,
17798 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
17799 }
411707f4
CC
17800}
17801
a4f6c312 17802/* Write function profiler code. */
e165f3f0
RK
17803
17804void
a2369ed3 17805output_function_profiler (FILE *file, int labelno)
e165f3f0 17806{
3daf36a4 17807 char buf[100];
e165f3f0 17808
38c1f2d7 17809 switch (DEFAULT_ABI)
3daf36a4 17810 {
38c1f2d7 17811 default:
37409796 17812 gcc_unreachable ();
38c1f2d7
MM
17813
17814 case ABI_V4:
09eeeacb
AM
17815 if (!TARGET_32BIT)
17816 {
d4ee4d25 17817 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
17818 return;
17819 }
ffcfcb5f 17820 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 17821 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
17822 if (NO_PROFILE_COUNTERS)
17823 {
17824 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17825 reg_names[0], reg_names[1]);
17826 }
17827 else if (TARGET_SECURE_PLT && flag_pic)
17828 {
17829 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
17830 reg_names[0], reg_names[1]);
17831 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
17832 asm_fprintf (file, "\t{cau|addis} %s,%s,",
17833 reg_names[12], reg_names[12]);
17834 assemble_name (file, buf);
17835 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
17836 assemble_name (file, buf);
17837 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
17838 }
17839 else if (flag_pic == 1)
38c1f2d7 17840 {
dfdfa60f 17841 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
17842 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17843 reg_names[0], reg_names[1]);
17167fd8 17844 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 17845 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 17846 assemble_name (file, buf);
17167fd8 17847 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 17848 }
9ebbca7d 17849 else if (flag_pic > 1)
38c1f2d7 17850 {
71625f3d
AM
17851 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17852 reg_names[0], reg_names[1]);
9ebbca7d 17853 /* Now, we need to get the address of the label. */
71625f3d 17854 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 17855 assemble_name (file, buf);
9ebbca7d
GK
17856 fputs ("-.\n1:", file);
17857 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 17858 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
17859 reg_names[0], reg_names[11]);
17860 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
17861 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 17862 }
38c1f2d7
MM
17863 else
17864 {
17167fd8 17865 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 17866 assemble_name (file, buf);
dfdfa60f 17867 fputs ("@ha\n", file);
71625f3d
AM
17868 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17869 reg_names[0], reg_names[1]);
a260abc9 17870 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 17871 assemble_name (file, buf);
17167fd8 17872 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
17873 }
17874
50d440bc 17875 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
17876 fprintf (file, "\tbl %s%s\n",
17877 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
17878 break;
17879
17880 case ABI_AIX:
ee890fe2 17881 case ABI_DARWIN:
ffcfcb5f
AM
17882 if (!TARGET_PROFILE_KERNEL)
17883 {
a3c9585f 17884 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
17885 }
17886 else
17887 {
37409796 17888 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
17889
17890 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
17891 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
17892
6de9cd9a 17893 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
17894 {
17895 asm_fprintf (file, "\tstd %s,24(%s)\n",
17896 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17897 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17898 asm_fprintf (file, "\tld %s,24(%s)\n",
17899 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17900 }
17901 else
17902 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17903 }
38c1f2d7
MM
17904 break;
17905 }
e165f3f0 17906}
a251ffd0 17907
b54cf83a 17908\f
44cd321e
PS
17909
17910/* The following variable value is the last issued insn. */
17911
17912static rtx last_scheduled_insn;
17913
17914/* The following variable helps to balance issuing of load and
 17915 store instructions.  */
17916
17917static int load_store_pendulum;
17918
b54cf83a
DE
17919/* Power4 load update and store update instructions are cracked into a
17920 load or store and an integer insn which are executed in the same cycle.
17921 Branches have their own dispatch slot which does not count against the
17922 GCC issue rate, but it changes the program flow so there are no other
17923 instructions to issue in this cycle. */
17924
17925static int
f676971a
EC
17926rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
17927 int verbose ATTRIBUTE_UNUSED,
a2369ed3 17928 rtx insn, int more)
b54cf83a 17929{
44cd321e 17930 last_scheduled_insn = insn;
b54cf83a
DE
17931 if (GET_CODE (PATTERN (insn)) == USE
17932 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
17933 {
17934 cached_can_issue_more = more;
17935 return cached_can_issue_more;
17936 }
17937
17938 if (insn_terminates_group_p (insn, current_group))
17939 {
17940 cached_can_issue_more = 0;
17941 return cached_can_issue_more;
17942 }
b54cf83a 17943
d296e02e
AP
 17944 /* If the insn has no reservation but we still get here, use the default. */
17945 if (recog_memoized (insn) < 0)
17946 return more;
17947
ec507f2d 17948 if (rs6000_sched_groups)
b54cf83a 17949 {
cbe26ab8 17950 if (is_microcoded_insn (insn))
44cd321e 17951 cached_can_issue_more = 0;
cbe26ab8 17952 else if (is_cracked_insn (insn))
44cd321e
PS
17953 cached_can_issue_more = more > 2 ? more - 2 : 0;
17954 else
17955 cached_can_issue_more = more - 1;
17956
17957 return cached_can_issue_more;
b54cf83a 17958 }
165b263e 17959
d296e02e
AP
17960 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
17961 return 0;
17962
44cd321e
PS
17963 cached_can_issue_more = more - 1;
17964 return cached_can_issue_more;
b54cf83a
DE
17965}
17966
a251ffd0
TG
17967/* Adjust the cost of a scheduling dependency. Return the new cost of
17968 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
17969
c237e94a 17970static int
0a4f0294 17971rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 17972{
44cd321e 17973 enum attr_type attr_type;
a251ffd0 17974
44cd321e 17975 if (! recog_memoized (insn))
a251ffd0
TG
17976 return 0;
17977
44cd321e 17978 switch (REG_NOTE_KIND (link))
a251ffd0 17979 {
44cd321e
PS
17980 case REG_DEP_TRUE:
17981 {
17982 /* Data dependency; DEP_INSN writes a register that INSN reads
17983 some cycles later. */
17984
17985 /* Separate a load from a narrower, dependent store. */
17986 if (rs6000_sched_groups
17987 && GET_CODE (PATTERN (insn)) == SET
17988 && GET_CODE (PATTERN (dep_insn)) == SET
17989 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
17990 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
17991 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
17992 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
17993 return cost + 14;
17994
17995 attr_type = get_attr_type (insn);
17996
17997 switch (attr_type)
17998 {
17999 case TYPE_JMPREG:
18000 /* Tell the first scheduling pass about the latency between
18001 a mtctr and bctr (and mtlr and br/blr). The first
18002 scheduling pass will not know about this latency since
18003 the mtctr instruction, which has the latency associated
18004 to it, will be generated by reload. */
18005 return TARGET_POWER ? 5 : 4;
18006 case TYPE_BRANCH:
18007 /* Leave some extra cycles between a compare and its
18008 dependent branch, to inhibit expensive mispredicts. */
18009 if ((rs6000_cpu_attr == CPU_PPC603
18010 || rs6000_cpu_attr == CPU_PPC604
18011 || rs6000_cpu_attr == CPU_PPC604E
18012 || rs6000_cpu_attr == CPU_PPC620
18013 || rs6000_cpu_attr == CPU_PPC630
18014 || rs6000_cpu_attr == CPU_PPC750
18015 || rs6000_cpu_attr == CPU_PPC7400
18016 || rs6000_cpu_attr == CPU_PPC7450
18017 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
18018 || rs6000_cpu_attr == CPU_POWER5
18019 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
18020 && recog_memoized (dep_insn)
18021 && (INSN_CODE (dep_insn) >= 0))
982afe02 18022
44cd321e
PS
18023 switch (get_attr_type (dep_insn))
18024 {
18025 case TYPE_CMP:
18026 case TYPE_COMPARE:
18027 case TYPE_DELAYED_COMPARE:
18028 case TYPE_IMUL_COMPARE:
18029 case TYPE_LMUL_COMPARE:
18030 case TYPE_FPCOMPARE:
18031 case TYPE_CR_LOGICAL:
18032 case TYPE_DELAYED_CR:
18033 return cost + 2;
18034 default:
18035 break;
18036 }
18037 break;
18038
18039 case TYPE_STORE:
18040 case TYPE_STORE_U:
18041 case TYPE_STORE_UX:
18042 case TYPE_FPSTORE:
18043 case TYPE_FPSTORE_U:
18044 case TYPE_FPSTORE_UX:
18045 if ((rs6000_cpu == PROCESSOR_POWER6)
18046 && recog_memoized (dep_insn)
18047 && (INSN_CODE (dep_insn) >= 0))
18048 {
18049
18050 if (GET_CODE (PATTERN (insn)) != SET)
18051 /* If this happens, we have to extend this to schedule
18052 optimally. Return default for now. */
18053 return cost;
18054
18055 /* Adjust the cost for the case where the value written
18056 by a fixed point operation is used as the address
18057 gen value on a store. */
18058 switch (get_attr_type (dep_insn))
18059 {
18060 case TYPE_LOAD:
18061 case TYPE_LOAD_U:
18062 case TYPE_LOAD_UX:
18063 case TYPE_CNTLZ:
18064 {
18065 if (! store_data_bypass_p (dep_insn, insn))
18066 return 4;
18067 break;
18068 }
18069 case TYPE_LOAD_EXT:
18070 case TYPE_LOAD_EXT_U:
18071 case TYPE_LOAD_EXT_UX:
18072 case TYPE_VAR_SHIFT_ROTATE:
18073 case TYPE_VAR_DELAYED_COMPARE:
18074 {
18075 if (! store_data_bypass_p (dep_insn, insn))
18076 return 6;
18077 break;
18078 }
18079 case TYPE_INTEGER:
18080 case TYPE_COMPARE:
18081 case TYPE_FAST_COMPARE:
18082 case TYPE_EXTS:
18083 case TYPE_SHIFT:
18084 case TYPE_INSERT_WORD:
18085 case TYPE_INSERT_DWORD:
18086 case TYPE_FPLOAD_U:
18087 case TYPE_FPLOAD_UX:
18088 case TYPE_STORE_U:
18089 case TYPE_STORE_UX:
18090 case TYPE_FPSTORE_U:
18091 case TYPE_FPSTORE_UX:
18092 {
18093 if (! store_data_bypass_p (dep_insn, insn))
18094 return 3;
18095 break;
18096 }
18097 case TYPE_IMUL:
18098 case TYPE_IMUL2:
18099 case TYPE_IMUL3:
18100 case TYPE_LMUL:
18101 case TYPE_IMUL_COMPARE:
18102 case TYPE_LMUL_COMPARE:
18103 {
18104 if (! store_data_bypass_p (dep_insn, insn))
18105 return 17;
18106 break;
18107 }
18108 case TYPE_IDIV:
18109 {
18110 if (! store_data_bypass_p (dep_insn, insn))
18111 return 45;
18112 break;
18113 }
18114 case TYPE_LDIV:
18115 {
18116 if (! store_data_bypass_p (dep_insn, insn))
18117 return 57;
18118 break;
18119 }
18120 default:
18121 break;
18122 }
18123 }
18124 break;
18125
18126 case TYPE_LOAD:
18127 case TYPE_LOAD_U:
18128 case TYPE_LOAD_UX:
18129 case TYPE_LOAD_EXT:
18130 case TYPE_LOAD_EXT_U:
18131 case TYPE_LOAD_EXT_UX:
18132 if ((rs6000_cpu == PROCESSOR_POWER6)
18133 && recog_memoized (dep_insn)
18134 && (INSN_CODE (dep_insn) >= 0))
18135 {
18136
18137 /* Adjust the cost for the case where the value written
18138 by a fixed point instruction is used within the address
18139 gen portion of a subsequent load(u)(x). */
18140 switch (get_attr_type (dep_insn))
18141 {
18142 case TYPE_LOAD:
18143 case TYPE_LOAD_U:
18144 case TYPE_LOAD_UX:
18145 case TYPE_CNTLZ:
18146 {
18147 if (set_to_load_agen (dep_insn, insn))
18148 return 4;
18149 break;
18150 }
18151 case TYPE_LOAD_EXT:
18152 case TYPE_LOAD_EXT_U:
18153 case TYPE_LOAD_EXT_UX:
18154 case TYPE_VAR_SHIFT_ROTATE:
18155 case TYPE_VAR_DELAYED_COMPARE:
18156 {
18157 if (set_to_load_agen (dep_insn, insn))
18158 return 6;
18159 break;
18160 }
18161 case TYPE_INTEGER:
18162 case TYPE_COMPARE:
18163 case TYPE_FAST_COMPARE:
18164 case TYPE_EXTS:
18165 case TYPE_SHIFT:
18166 case TYPE_INSERT_WORD:
18167 case TYPE_INSERT_DWORD:
18168 case TYPE_FPLOAD_U:
18169 case TYPE_FPLOAD_UX:
18170 case TYPE_STORE_U:
18171 case TYPE_STORE_UX:
18172 case TYPE_FPSTORE_U:
18173 case TYPE_FPSTORE_UX:
18174 {
18175 if (set_to_load_agen (dep_insn, insn))
18176 return 3;
18177 break;
18178 }
18179 case TYPE_IMUL:
18180 case TYPE_IMUL2:
18181 case TYPE_IMUL3:
18182 case TYPE_LMUL:
18183 case TYPE_IMUL_COMPARE:
18184 case TYPE_LMUL_COMPARE:
18185 {
18186 if (set_to_load_agen (dep_insn, insn))
18187 return 17;
18188 break;
18189 }
18190 case TYPE_IDIV:
18191 {
18192 if (set_to_load_agen (dep_insn, insn))
18193 return 45;
18194 break;
18195 }
18196 case TYPE_LDIV:
18197 {
18198 if (set_to_load_agen (dep_insn, insn))
18199 return 57;
18200 break;
18201 }
18202 default:
18203 break;
18204 }
18205 }
18206 break;
18207
18208 case TYPE_FPLOAD:
18209 if ((rs6000_cpu == PROCESSOR_POWER6)
18210 && recog_memoized (dep_insn)
18211 && (INSN_CODE (dep_insn) >= 0)
18212 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
18213 return 2;
18214
18215 default:
18216 break;
18217 }
c9dbf840 18218
a251ffd0 18219 /* Fall out to return default cost. */
44cd321e
PS
18220 }
18221 break;
18222
18223 case REG_DEP_OUTPUT:
18224 /* Output dependency; DEP_INSN writes a register that INSN writes some
18225 cycles later. */
18226 if ((rs6000_cpu == PROCESSOR_POWER6)
18227 && recog_memoized (dep_insn)
18228 && (INSN_CODE (dep_insn) >= 0))
18229 {
18230 attr_type = get_attr_type (insn);
18231
18232 switch (attr_type)
18233 {
18234 case TYPE_FP:
18235 if (get_attr_type (dep_insn) == TYPE_FP)
18236 return 1;
18237 break;
18238 case TYPE_FPLOAD:
18239 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
18240 return 2;
18241 break;
18242 default:
18243 break;
18244 }
18245 }
      break;
18246 case REG_DEP_ANTI:
18247 /* Anti dependency; DEP_INSN reads a register that INSN writes some
18248 cycles later. */
18249 return 0;
18250
18251 default:
18252 gcc_unreachable ();
a251ffd0
TG
18253 }
18254
18255 return cost;
18256}
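/* Worked example (hypothetical instructions, chosen only to illustrate the
   POWER6 store cases above): if DEP_INSN is an integer add "addi r9,r9,4"
   and INSN is "stw r3,0(r9)", the add feeds the address generation of the
   store, so the TYPE_INTEGER case raises the dependence cost to 3; if
   store_data_bypass_p reports that the value only feeds the store data,
   the default COST is kept instead. */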
b6c9286a 18257
cbe26ab8 18258/* Return true if INSN is microcoded.
839a4992 18259 Return false otherwise. */
cbe26ab8
DN
18260
18261static bool
18262is_microcoded_insn (rtx insn)
18263{
18264 if (!insn || !INSN_P (insn)
18265 || GET_CODE (PATTERN (insn)) == USE
18266 || GET_CODE (PATTERN (insn)) == CLOBBER)
18267 return false;
18268
d296e02e
AP
18269 if (rs6000_cpu_attr == CPU_CELL)
18270 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
18271
ec507f2d 18272 if (rs6000_sched_groups)
cbe26ab8
DN
18273 {
18274 enum attr_type type = get_attr_type (insn);
18275 if (type == TYPE_LOAD_EXT_U
18276 || type == TYPE_LOAD_EXT_UX
18277 || type == TYPE_LOAD_UX
18278 || type == TYPE_STORE_UX
18279 || type == TYPE_MFCR)
c4ad648e 18280 return true;
cbe26ab8
DN
18281 }
18282
18283 return false;
18284}
18285
cbe26ab8
DN
18286/* The function returns true if INSN is cracked into 2 instructions
18287 by the processor (and therefore occupies 2 issue slots). */
18288
18289static bool
18290is_cracked_insn (rtx insn)
18291{
18292 if (!insn || !INSN_P (insn)
18293 || GET_CODE (PATTERN (insn)) == USE
18294 || GET_CODE (PATTERN (insn)) == CLOBBER)
18295 return false;
18296
ec507f2d 18297 if (rs6000_sched_groups)
cbe26ab8
DN
18298 {
18299 enum attr_type type = get_attr_type (insn);
18300 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
18301 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
18302 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
18303 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
18304 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
18305 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
18306 || type == TYPE_IDIV || type == TYPE_LDIV
18307 || type == TYPE_INSERT_WORD)
18308 return true;
cbe26ab8
DN
18309 }
18310
18311 return false;
18312}
18313
18314/* The function returns true if INSN can be issued only from
a3c9585f 18315 the branch slot. */
cbe26ab8
DN
18316
18317static bool
18318is_branch_slot_insn (rtx insn)
18319{
18320 if (!insn || !INSN_P (insn)
18321 || GET_CODE (PATTERN (insn)) == USE
18322 || GET_CODE (PATTERN (insn)) == CLOBBER)
18323 return false;
18324
ec507f2d 18325 if (rs6000_sched_groups)
cbe26ab8
DN
18326 {
18327 enum attr_type type = get_attr_type (insn);
18328 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 18329 return true;
cbe26ab8
DN
18330 return false;
18331 }
18332
18333 return false;
18334}
79ae11c4 18335
44cd321e
PS
18336/* The function returns true if out_insn sets a value that is
18337 used in the address generation computation of in_insn. */
18338static bool
18339set_to_load_agen (rtx out_insn, rtx in_insn)
18340{
18341 rtx out_set, in_set;
18342
18343 /* For performance reasons, only handle the simple case where
18344 both loads are a single_set. */
18345 out_set = single_set (out_insn);
18346 if (out_set)
18347 {
18348 in_set = single_set (in_insn);
18349 if (in_set)
18350 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
18351 }
18352
18353 return false;
18354}
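/* Example (hypothetical instructions, for illustration only): with
   out_insn = "addi r9,r9,16" and in_insn = "lwz r3,0(r9)", the register
   r9 set by the add is mentioned in the source of the load, so
   set_to_load_agen returns true. */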
18355
18356/* The function returns true if the target storage location of
18357 insn1 is adjacent to the target storage location of insn2. */
18359
18360static bool
18361adjacent_mem_locations (rtx insn1, rtx insn2)
18362{
18363
e3a0e200
PB
18364 rtx a = get_store_dest (PATTERN (insn1));
18365 rtx b = get_store_dest (PATTERN (insn2));
18366
44cd321e
PS
18367 if ((GET_CODE (XEXP (a, 0)) == REG
18368 || (GET_CODE (XEXP (a, 0)) == PLUS
18369 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
18370 && (GET_CODE (XEXP (b, 0)) == REG
18371 || (GET_CODE (XEXP (b, 0)) == PLUS
18372 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
18373 {
f98e8938 18374 HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
44cd321e 18375 rtx reg0, reg1;
44cd321e
PS
18376
18377 if (GET_CODE (XEXP (a, 0)) == PLUS)
18378 {
18379 reg0 = XEXP (XEXP (a, 0), 0);
18380 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
18381 }
18382 else
18383 reg0 = XEXP (a, 0);
18384
18385 if (GET_CODE (XEXP (b, 0)) == PLUS)
18386 {
18387 reg1 = XEXP (XEXP (b, 0), 0);
18388 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
18389 }
18390 else
18391 reg1 = XEXP (b, 0);
18392
18393 val_diff = val1 - val0;
18394
18395 return ((REGNO (reg0) == REGNO (reg1))
f98e8938
JJ
18396 && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
18397 || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
44cd321e
PS
18398 }
18399
18400 return false;
18401}
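/* Example (hypothetical instructions, for illustration only):
   "stw r3,0(r9)" followed by "stw r4,4(r9)" share base register r9 and
   their offsets differ by 4, which equals the MEM_SIZE of the first
   store, so the two locations are considered adjacent. */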
18402
a4f6c312 18403/* Update the integer scheduling
79ae11c4
DN
18404 priority INSN_PRIORITY (INSN). Increase the priority to execute
18405 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
18406 define this hook if you do not need to adjust the scheduling
18407 priorities of insns. */
bef84347 18408
c237e94a 18409static int
a2369ed3 18410rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 18411{
a4f6c312
SS
18412 /* On machines (like the 750) which have asymmetric integer units,
18413 where one integer unit can do multiply and divides and the other
18414 can't, reduce the priority of multiply/divide so it is scheduled
18415 before other integer operations. */
bef84347
VM
18416
18417#if 0
2c3c49de 18418 if (! INSN_P (insn))
bef84347
VM
18419 return priority;
18420
18421 if (GET_CODE (PATTERN (insn)) == USE)
18422 return priority;
18423
18424 switch (rs6000_cpu_attr) {
18425 case CPU_PPC750:
18426 switch (get_attr_type (insn))
18427 {
18428 default:
18429 break;
18430
18431 case TYPE_IMUL:
18432 case TYPE_IDIV:
3cb999d8
DE
18433 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
18434 priority, priority);
bef84347
VM
18435 if (priority >= 0 && priority < 0x01000000)
18436 priority >>= 3;
18437 break;
18438 }
18439 }
18440#endif
18441
44cd321e 18442 if (insn_must_be_first_in_group (insn)
79ae11c4 18443 && reload_completed
f676971a 18444 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
18445 && rs6000_sched_restricted_insns_priority)
18446 {
18447
c4ad648e
AM
18448 /* Prioritize insns that can be dispatched only in the first
18449 dispatch slot. */
79ae11c4 18450 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
18451 /* Attach highest priority to insn. This means that in
18452 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 18453 precede 'priority' (critical path) considerations. */
f676971a 18454 return current_sched_info->sched_max_insns_priority;
79ae11c4 18455 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 18456 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
18457 haifa-sched.c:ready_sort(), only 'priority' (critical path)
18458 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
18459 return (priority + 1);
18460 }
79ae11c4 18461
44cd321e
PS
18462 if (rs6000_cpu == PROCESSOR_POWER6
18463 && ((load_store_pendulum == -2 && is_load_insn (insn))
18464 || (load_store_pendulum == 2 && is_store_insn (insn))))
18465 /* Attach highest priority to insn if the scheduler has just issued two
18466 stores and this instruction is a load, or two loads and this instruction
18467 is a store. Power6 wants loads and stores scheduled alternately
18468 when possible. */
18469 return current_sched_info->sched_max_insns_priority;
18470
bef84347
VM
18471 return priority;
18472}
18473
d296e02e
AP
18474/* Return true if the instruction is nonpipelined on the Cell. */
18475static bool
18476is_nonpipeline_insn (rtx insn)
18477{
18478 enum attr_type type;
18479 if (!insn || !INSN_P (insn)
18480 || GET_CODE (PATTERN (insn)) == USE
18481 || GET_CODE (PATTERN (insn)) == CLOBBER)
18482 return false;
18483
18484 type = get_attr_type (insn);
18485 if (type == TYPE_IMUL
18486 || type == TYPE_IMUL2
18487 || type == TYPE_IMUL3
18488 || type == TYPE_LMUL
18489 || type == TYPE_IDIV
18490 || type == TYPE_LDIV
18491 || type == TYPE_SDIV
18492 || type == TYPE_DDIV
18493 || type == TYPE_SSQRT
18494 || type == TYPE_DSQRT
18495 || type == TYPE_MFCR
18496 || type == TYPE_MFCRF
18497 || type == TYPE_MFJMPR)
18498 {
18499 return true;
18500 }
18501 return false;
18502}
18503
18504
a4f6c312
SS
18505/* Return how many instructions the machine can issue per cycle. */
18506
c237e94a 18507static int
863d938c 18508rs6000_issue_rate (void)
b6c9286a 18509{
3317bab1
DE
18510 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
18511 if (!reload_completed)
18512 return 1;
18513
b6c9286a 18514 switch (rs6000_cpu_attr) {
3cb999d8
DE
18515 case CPU_RIOS1: /* ? */
18516 case CPU_RS64A:
18517 case CPU_PPC601: /* ? */
ed947a96 18518 case CPU_PPC7450:
3cb999d8 18519 return 3;
b54cf83a 18520 case CPU_PPC440:
b6c9286a 18521 case CPU_PPC603:
bef84347 18522 case CPU_PPC750:
ed947a96 18523 case CPU_PPC7400:
be12c2b0 18524 case CPU_PPC8540:
d296e02e 18525 case CPU_CELL:
f676971a 18526 return 2;
3cb999d8 18527 case CPU_RIOS2:
b6c9286a 18528 case CPU_PPC604:
19684119 18529 case CPU_PPC604E:
b6c9286a 18530 case CPU_PPC620:
3cb999d8 18531 case CPU_PPC630:
b6c9286a 18532 return 4;
cbe26ab8 18533 case CPU_POWER4:
ec507f2d 18534 case CPU_POWER5:
44cd321e 18535 case CPU_POWER6:
cbe26ab8 18536 return 5;
b6c9286a
MM
18537 default:
18538 return 1;
18539 }
18540}
18541
be12c2b0
VM
18542/* Return how many instructions to look ahead for better insn
18543 scheduling. */
18544
18545static int
863d938c 18546rs6000_use_sched_lookahead (void)
be12c2b0
VM
18547{
18548 if (rs6000_cpu_attr == CPU_PPC8540)
18549 return 4;
d296e02e
AP
18550 if (rs6000_cpu_attr == CPU_CELL)
18551 return (reload_completed ? 8 : 0);
be12c2b0
VM
18552 return 0;
18553}
18554
d296e02e
AP
18555/* We are choosing an insn from the ready queue. Return nonzero if INSN can be chosen. */
18556static int
18557rs6000_use_sched_lookahead_guard (rtx insn)
18558{
18559 if (rs6000_cpu_attr != CPU_CELL)
18560 return 1;
18561
18562 if (insn == NULL_RTX || !INSN_P (insn))
18563 abort ();
982afe02 18564
d296e02e
AP
18565 if (!reload_completed
18566 || is_nonpipeline_insn (insn)
18567 || is_microcoded_insn (insn))
18568 return 0;
18569
18570 return 1;
18571}
18572
569fa502
DN
18573/* Determine if PAT refers to memory. */
18574
18575static bool
18576is_mem_ref (rtx pat)
18577{
18578 const char * fmt;
18579 int i, j;
18580 bool ret = false;
18581
1de59bbd
DE
18582 /* stack_tie does not produce any real memory traffic. */
18583 if (GET_CODE (pat) == UNSPEC
18584 && XINT (pat, 1) == UNSPEC_TIE)
18585 return false;
18586
569fa502
DN
18587 if (GET_CODE (pat) == MEM)
18588 return true;
18589
18590 /* Recursively process the pattern. */
18591 fmt = GET_RTX_FORMAT (GET_CODE (pat));
18592
18593 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
18594 {
18595 if (fmt[i] == 'e')
18596 ret |= is_mem_ref (XEXP (pat, i));
18597 else if (fmt[i] == 'E')
18598 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
18599 ret |= is_mem_ref (XVECEXP (pat, i, j));
18600 }
18601
18602 return ret;
18603}
18604
18605/* Determine if PAT is a PATTERN of a load insn. */
f676971a 18606
569fa502
DN
18607static bool
18608is_load_insn1 (rtx pat)
18609{
18610 if (!pat || pat == NULL_RTX)
18611 return false;
18612
18613 if (GET_CODE (pat) == SET)
18614 return is_mem_ref (SET_SRC (pat));
18615
18616 if (GET_CODE (pat) == PARALLEL)
18617 {
18618 int i;
18619
18620 for (i = 0; i < XVECLEN (pat, 0); i++)
18621 if (is_load_insn1 (XVECEXP (pat, 0, i)))
18622 return true;
18623 }
18624
18625 return false;
18626}
18627
18628/* Determine if INSN loads from memory. */
18629
18630static bool
18631is_load_insn (rtx insn)
18632{
18633 if (!insn || !INSN_P (insn))
18634 return false;
18635
18636 if (GET_CODE (insn) == CALL_INSN)
18637 return false;
18638
18639 return is_load_insn1 (PATTERN (insn));
18640}
18641
18642/* Determine if PAT is a PATTERN of a store insn. */
18643
18644static bool
18645is_store_insn1 (rtx pat)
18646{
18647 if (!pat || pat == NULL_RTX)
18648 return false;
18649
18650 if (GET_CODE (pat) == SET)
18651 return is_mem_ref (SET_DEST (pat));
18652
18653 if (GET_CODE (pat) == PARALLEL)
18654 {
18655 int i;
18656
18657 for (i = 0; i < XVECLEN (pat, 0); i++)
18658 if (is_store_insn1 (XVECEXP (pat, 0, i)))
18659 return true;
18660 }
18661
18662 return false;
18663}
18664
18665/* Determine if INSN stores to memory. */
18666
18667static bool
18668is_store_insn (rtx insn)
18669{
18670 if (!insn || !INSN_P (insn))
18671 return false;
18672
18673 return is_store_insn1 (PATTERN (insn));
18674}
18675
e3a0e200
PB
18676/* Return the dest of a store insn. */
18677
18678static rtx
18679get_store_dest (rtx pat)
18680{
18681 gcc_assert (is_store_insn1 (pat));
18682
18683 if (GET_CODE (pat) == SET)
18684 return SET_DEST (pat);
18685 else if (GET_CODE (pat) == PARALLEL)
18686 {
18687 int i;
18688
18689 for (i = 0; i < XVECLEN (pat, 0); i++)
18690 {
18691 rtx inner_pat = XVECEXP (pat, 0, i);
18692 if (GET_CODE (inner_pat) == SET
18693 && is_mem_ref (SET_DEST (inner_pat)))
18694 return inner_pat;
18695 }
18696 }
18697 /* We shouldn't get here, because we should have either a simple
18698 store insn or a store with update which are covered above. */
18699 gcc_unreachable();
18700}
18701
569fa502
DN
18702/* Returns whether the dependence between INSN and NEXT is considered
18703 costly by the given target. */
18704
18705static bool
b198261f 18706rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 18707{
b198261f
MK
18708 rtx insn;
18709 rtx next;
18710
aabcd309 18711 /* If the flag is not enabled - no dependence is considered costly;
f676971a 18712 allow all dependent insns in the same group.
569fa502
DN
18713 This is the most aggressive option. */
18714 if (rs6000_sched_costly_dep == no_dep_costly)
18715 return false;
18716
f676971a 18717 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
18718 do not allow dependent instructions in the same group.
18719 This is the most conservative option. */
18720 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 18721 return true;
569fa502 18722
b198261f
MK
18723 insn = DEP_PRO (dep);
18724 next = DEP_CON (dep);
18725
f676971a
EC
18726 if (rs6000_sched_costly_dep == store_to_load_dep_costly
18727 && is_load_insn (next)
569fa502
DN
18728 && is_store_insn (insn))
18729 /* Prevent load after store in the same group. */
18730 return true;
18731
18732 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 18733 && is_load_insn (next)
569fa502 18734 && is_store_insn (insn)
e2f6ff94 18735 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
18736 /* Prevent load after store in the same group if it is a true
18737 dependence. */
569fa502 18738 return true;
f676971a
EC
18739
18740 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
18741 and will not be scheduled in the same group. */
18742 if (rs6000_sched_costly_dep <= max_dep_latency
18743 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
18744 return true;
18745
18746 return false;
18747}
18748
f676971a 18749/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
18750 skipping any "non-active" insns - insns that will not actually occupy
18751 an issue slot. Return NULL_RTX if such an insn is not found. */
18752
18753static rtx
18754get_next_active_insn (rtx insn, rtx tail)
18755{
f489aff8 18756 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
18757 return NULL_RTX;
18758
f489aff8 18759 while (1)
cbe26ab8 18760 {
f489aff8
AM
18761 insn = NEXT_INSN (insn);
18762 if (insn == NULL_RTX || insn == tail)
18763 return NULL_RTX;
cbe26ab8 18764
f489aff8
AM
18765 if (CALL_P (insn)
18766 || JUMP_P (insn)
18767 || (NONJUMP_INSN_P (insn)
18768 && GET_CODE (PATTERN (insn)) != USE
18769 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 18770 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
18771 break;
18772 }
18773 return insn;
cbe26ab8
DN
18774}
18775
44cd321e
PS
18776/* We are about to begin issuing insns for this clock cycle. */
18777
18778static int
18779rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
18780 rtx *ready ATTRIBUTE_UNUSED,
18781 int *pn_ready ATTRIBUTE_UNUSED,
18782 int clock_var ATTRIBUTE_UNUSED)
18783{
d296e02e
AP
18784 int n_ready = *pn_ready;
18785
44cd321e
PS
18786 if (sched_verbose)
18787 fprintf (dump, "// rs6000_sched_reorder :\n");
18788
d296e02e
AP
18789 /* Reorder the ready list, if the second to last ready insn
18790 is a nonpipeline insn. */
18791 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
18792 {
18793 if (is_nonpipeline_insn (ready[n_ready - 1])
18794 && (recog_memoized (ready[n_ready - 2]) > 0))
18795 /* Simply swap first two insns. */
18796 {
18797 rtx tmp = ready[n_ready - 1];
18798 ready[n_ready - 1] = ready[n_ready - 2];
18799 ready[n_ready - 2] = tmp;
18800 }
18801 }
18802
44cd321e
PS
18803 if (rs6000_cpu == PROCESSOR_POWER6)
18804 load_store_pendulum = 0;
18805
18806 return rs6000_issue_rate ();
18807}
18808
18809/* Like rs6000_sched_reorder, but called after issuing each insn. */
18810
18811static int
18812rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
18813 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
18814{
18815 if (sched_verbose)
18816 fprintf (dump, "// rs6000_sched_reorder2 :\n");
18817
18818 /* For Power6, we need to handle some special cases to try to keep the
18819 store queue from overflowing and triggering expensive flushes.
18820
18821 This code monitors how load and store instructions are being issued
18822 and skews the ready list one way or the other to increase the likelihood
18823 that a desired instruction is issued at the proper time.
18824
18825 A couple of things are done. First, we maintain a "load_store_pendulum"
18826 to track the current state of load/store issue.
18827
18828 - If the pendulum is at zero, then no loads or stores have been
18829 issued in the current cycle so we do nothing.
18830
18831 - If the pendulum is 1, then a single load has been issued in this
18832 cycle and we attempt to locate another load in the ready list to
18833 issue with it.
18834
2f8e468b 18835 - If the pendulum is -2, then two stores have already been
44cd321e
PS
18836 issued in this cycle, so we increase the priority of the first load
18837 in the ready list to increase its likelihood of being chosen first
18838 in the next cycle.
18839
18840 - If the pendulum is -1, then a single store has been issued in this
18841 cycle and we attempt to locate another store in the ready list to
18842 issue with it, preferring a store to an adjacent memory location to
18843 facilitate store pairing in the store queue.
18844
18845 - If the pendulum is 2, then two loads have already been
18846 issued in this cycle, so we increase the priority of the first store
18847 in the ready list to increase its likelihood of being chosen first
18848 in the next cycle.
18849
18850 - If the pendulum < -2 or > 2, then do nothing.
18851
18852 Note: This code covers the most common scenarios. There exist non
18853 load/store instructions which make use of the LSU and which
18854 would need to be accounted for to strictly model the behavior
18855 of the machine. Those instructions are currently unaccounted
18856 for to help minimize compile time overhead of this code.
18857 */
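 /* Worked example (illustrative): the pendulum starts each cycle at 0.
    When a store issues it moves to -1 and the code below looks for a
    second store to pair with it, preferring one adjacent to the first;
    if that also issues, the pendulum reaches -2 and the first load on
    the ready list gets a priority boost so loads are favored next cycle. */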
18858 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
18859 {
18860 int pos;
18861 int i;
18862 rtx tmp;
18863
18864 if (is_store_insn (last_scheduled_insn))
18865 /* Issuing a store, swing the load_store_pendulum to the left */
18866 load_store_pendulum--;
18867 else if (is_load_insn (last_scheduled_insn))
18868 /* Issuing a load, swing the load_store_pendulum to the right */
18869 load_store_pendulum++;
18870 else
18871 return cached_can_issue_more;
18872
18873 /* If the pendulum is balanced, or there is only one instruction on
18874 the ready list, then all is well, so return. */
18875 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
18876 return cached_can_issue_more;
18877
18878 if (load_store_pendulum == 1)
18879 {
18880 /* A load has been issued in this cycle. Scan the ready list
18881 for another load to issue with it */
18882 pos = *pn_ready-1;
18883
18884 while (pos >= 0)
18885 {
18886 if (is_load_insn (ready[pos]))
18887 {
18888 /* Found a load. Move it to the head of the ready list,
18889 and adjust its priority so that it is more likely to
18890 stay there. */
18891 tmp = ready[pos];
18892 for (i=pos; i<*pn_ready-1; i++)
18893 ready[i] = ready[i + 1];
18894 ready[*pn_ready-1] = tmp;
18895 if INSN_PRIORITY_KNOWN (tmp)
18896 INSN_PRIORITY (tmp)++;
18897 break;
18898 }
18899 pos--;
18900 }
18901 }
18902 else if (load_store_pendulum == -2)
18903 {
18904 /* Two stores have been issued in this cycle. Increase the
18905 priority of the first load in the ready list to favor it for
18906 issuing in the next cycle. */
18907 pos = *pn_ready-1;
18908
18909 while (pos >= 0)
18910 {
18911 if (is_load_insn (ready[pos])
18912 && INSN_PRIORITY_KNOWN (ready[pos]))
18913 {
18914 INSN_PRIORITY (ready[pos])++;
18915
18916 /* Adjust the pendulum to account for the fact that a load
18917 was found and increased in priority. This is to prevent
18918 increasing the priority of multiple loads */
18919 load_store_pendulum--;
18920
18921 break;
18922 }
18923 pos--;
18924 }
18925 }
18926 else if (load_store_pendulum == -1)
18927 {
18928 /* A store has been issued in this cycle. Scan the ready list for
18929 another store to issue with it, preferring a store to an adjacent
18930 memory location */
18931 int first_store_pos = -1;
18932
18933 pos = *pn_ready-1;
18934
18935 while (pos >= 0)
18936 {
18937 if (is_store_insn (ready[pos]))
18938 {
18939 /* Maintain the index of the first store found on the
18940 list */
18941 if (first_store_pos == -1)
18942 first_store_pos = pos;
18943
18944 if (is_store_insn (last_scheduled_insn)
18945 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
18946 {
18947 /* Found an adjacent store. Move it to the head of the
18948 ready list, and adjust its priority so that it is
18949 more likely to stay there. */
18950 tmp = ready[pos];
18951 for (i=pos; i<*pn_ready-1; i++)
18952 ready[i] = ready[i + 1];
18953 ready[*pn_ready-1] = tmp;
18954 if INSN_PRIORITY_KNOWN (tmp)
18955 INSN_PRIORITY (tmp)++;
18956 first_store_pos = -1;
18957
18958 break;
18959 }
18960 }
18961 pos--;
18962 }
18963
18964 if (first_store_pos >= 0)
18965 {
18966 /* An adjacent store wasn't found, but a non-adjacent store was,
18967 so move the non-adjacent store to the front of the ready
18968 list, and adjust its priority so that it is more likely to
18969 stay there. */
18970 tmp = ready[first_store_pos];
18971 for (i=first_store_pos; i<*pn_ready-1; i++)
18972 ready[i] = ready[i + 1];
18973 ready[*pn_ready-1] = tmp;
18974 if INSN_PRIORITY_KNOWN (tmp)
18975 INSN_PRIORITY (tmp)++;
18976 }
18977 }
18978 else if (load_store_pendulum == 2)
18979 {
18980 /* Two loads have been issued in this cycle. Increase the priority
18981 of the first store in the ready list to favor it for issuing in
18982 the next cycle. */
18983 pos = *pn_ready-1;
18984
18985 while (pos >= 0)
18986 {
18987 if (is_store_insn (ready[pos])
18988 && INSN_PRIORITY_KNOWN (ready[pos]))
18989 {
18990 INSN_PRIORITY (ready[pos])++;
18991
18992 /* Adjust the pendulum to account for the fact that a store
18993 was found and increased in priority. This is to prevent
18994 increasing the priority of multiple stores */
18995 load_store_pendulum++;
18996
18997 break;
18998 }
18999 pos--;
19000 }
19001 }
19002 }
19003
19004 return cached_can_issue_more;
19005}
19006
839a4992 19007/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
19008 of group WHICH_GROUP.
19009
19010 If WHICH_GROUP == current_group, this function will return true if INSN
19011 causes the termination of the current group (i.e, the dispatch group to
19012 which INSN belongs). This means that INSN will be the last insn in the
19013 group it belongs to.
19014
19015 If WHICH_GROUP == previous_group, this function will return true if INSN
19016 causes the termination of the previous group (i.e, the dispatch group that
19017 precedes the group to which INSN belongs). This means that INSN will be
19018 the first insn in the group it belongs to. */
19019
19020static bool
19021insn_terminates_group_p (rtx insn, enum group_termination which_group)
19022{
44cd321e 19023 bool first, last;
cbe26ab8
DN
19024
19025 if (! insn)
19026 return false;
569fa502 19027
44cd321e
PS
19028 first = insn_must_be_first_in_group (insn);
19029 last = insn_must_be_last_in_group (insn);
cbe26ab8 19030
44cd321e 19031 if (first && last)
cbe26ab8
DN
19032 return true;
19033
19034 if (which_group == current_group)
44cd321e 19035 return last;
cbe26ab8 19036 else if (which_group == previous_group)
44cd321e
PS
19037 return first;
19038
19039 return false;
19040}
19041
19042
19043static bool
19044insn_must_be_first_in_group (rtx insn)
19045{
19046 enum attr_type type;
19047
19048 if (!insn
19049 || insn == NULL_RTX
19050 || GET_CODE (insn) == NOTE
19051 || GET_CODE (PATTERN (insn)) == USE
19052 || GET_CODE (PATTERN (insn)) == CLOBBER)
19053 return false;
19054
19055 switch (rs6000_cpu)
cbe26ab8 19056 {
44cd321e
PS
19057 case PROCESSOR_POWER5:
19058 if (is_cracked_insn (insn))
19059 return true;
19060 case PROCESSOR_POWER4:
19061 if (is_microcoded_insn (insn))
19062 return true;
19063
19064 if (!rs6000_sched_groups)
19065 return false;
19066
19067 type = get_attr_type (insn);
19068
19069 switch (type)
19070 {
19071 case TYPE_MFCR:
19072 case TYPE_MFCRF:
19073 case TYPE_MTCR:
19074 case TYPE_DELAYED_CR:
19075 case TYPE_CR_LOGICAL:
19076 case TYPE_MTJMPR:
19077 case TYPE_MFJMPR:
19078 case TYPE_IDIV:
19079 case TYPE_LDIV:
19080 case TYPE_LOAD_L:
19081 case TYPE_STORE_C:
19082 case TYPE_ISYNC:
19083 case TYPE_SYNC:
19084 return true;
19085 default:
19086 break;
19087 }
19088 break;
19089 case PROCESSOR_POWER6:
19090 type = get_attr_type (insn);
19091
19092 switch (type)
19093 {
19094 case TYPE_INSERT_DWORD:
19095 case TYPE_EXTS:
19096 case TYPE_CNTLZ:
19097 case TYPE_SHIFT:
19098 case TYPE_VAR_SHIFT_ROTATE:
19099 case TYPE_TRAP:
19100 case TYPE_IMUL:
19101 case TYPE_IMUL2:
19102 case TYPE_IMUL3:
19103 case TYPE_LMUL:
19104 case TYPE_IDIV:
19105 case TYPE_INSERT_WORD:
19106 case TYPE_DELAYED_COMPARE:
19107 case TYPE_IMUL_COMPARE:
19108 case TYPE_LMUL_COMPARE:
19109 case TYPE_FPCOMPARE:
19110 case TYPE_MFCR:
19111 case TYPE_MTCR:
19112 case TYPE_MFJMPR:
19113 case TYPE_MTJMPR:
19114 case TYPE_ISYNC:
19115 case TYPE_SYNC:
19116 case TYPE_LOAD_L:
19117 case TYPE_STORE_C:
19118 case TYPE_LOAD_U:
19119 case TYPE_LOAD_UX:
19120 case TYPE_LOAD_EXT_UX:
19121 case TYPE_STORE_U:
19122 case TYPE_STORE_UX:
19123 case TYPE_FPLOAD_U:
19124 case TYPE_FPLOAD_UX:
19125 case TYPE_FPSTORE_U:
19126 case TYPE_FPSTORE_UX:
19127 return true;
19128 default:
19129 break;
19130 }
19131 break;
19132 default:
19133 break;
19134 }
19135
19136 return false;
19137}
19138
19139static bool
19140insn_must_be_last_in_group (rtx insn)
19141{
19142 enum attr_type type;
19143
19144 if (!insn
19145 || insn == NULL_RTX
19146 || GET_CODE (insn) == NOTE
19147 || GET_CODE (PATTERN (insn)) == USE
19148 || GET_CODE (PATTERN (insn)) == CLOBBER)
19149 return false;
19150
19151 switch (rs6000_cpu) {
19152 case PROCESSOR_POWER4:
19153 case PROCESSOR_POWER5:
19154 if (is_microcoded_insn (insn))
19155 return true;
19156
19157 if (is_branch_slot_insn (insn))
19158 return true;
19159
19160 break;
19161 case PROCESSOR_POWER6:
19162 type = get_attr_type (insn);
19163
19164 switch (type)
19165 {
19166 case TYPE_EXTS:
19167 case TYPE_CNTLZ:
19168 case TYPE_SHIFT:
19169 case TYPE_VAR_SHIFT_ROTATE:
19170 case TYPE_TRAP:
19171 case TYPE_IMUL:
19172 case TYPE_IMUL2:
19173 case TYPE_IMUL3:
19174 case TYPE_LMUL:
19175 case TYPE_IDIV:
19176 case TYPE_DELAYED_COMPARE:
19177 case TYPE_IMUL_COMPARE:
19178 case TYPE_LMUL_COMPARE:
19179 case TYPE_FPCOMPARE:
19180 case TYPE_MFCR:
19181 case TYPE_MTCR:
19182 case TYPE_MFJMPR:
19183 case TYPE_MTJMPR:
19184 case TYPE_ISYNC:
19185 case TYPE_SYNC:
19186 case TYPE_LOAD_L:
19187 case TYPE_STORE_C:
19188 return true;
19189 default:
19190 break;
cbe26ab8 19191 }
44cd321e
PS
19192 break;
19193 default:
19194 break;
19195 }
cbe26ab8
DN
19196
19197 return false;
19198}
19199
839a4992 19200/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
19201 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
19202
19203static bool
19204is_costly_group (rtx *group_insns, rtx next_insn)
19205{
19206 int i;
cbe26ab8
DN
19207 int issue_rate = rs6000_issue_rate ();
19208
19209 for (i = 0; i < issue_rate; i++)
19210 {
e2f6ff94
MK
19211 sd_iterator_def sd_it;
19212 dep_t dep;
cbe26ab8 19213 rtx insn = group_insns[i];
b198261f 19214
cbe26ab8 19215 if (!insn)
c4ad648e 19216 continue;
b198261f 19217
e2f6ff94 19218 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 19219 {
b198261f
MK
19220 rtx next = DEP_CON (dep);
19221
19222 if (next == next_insn
19223 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
19224 return true;
c4ad648e 19225 }
cbe26ab8
DN
19226 }
19227
19228 return false;
19229}
19230
f676971a 19231/* Utility function used by redefine_groups.
cbe26ab8
DN
19232 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
19233 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
19234 to keep it "far" (in a separate group) from GROUP_INSNS, following
19235 one of the following schemes, depending on the value of the flag
19236 -minsert_sched_nops = X:
19237 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 19238 in order to force NEXT_INSN into a separate group.
f676971a
EC
19239 (2) X < sched_finish_regroup_exact: insert exactly X nops.
19240 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
19241 insertion (has a group just ended, how many vacant issue slots remain in the
19242 last group, and how many dispatch groups were encountered so far). */
19243
f676971a 19244static int
c4ad648e
AM
19245force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
19246 rtx next_insn, bool *group_end, int can_issue_more,
19247 int *group_count)
cbe26ab8
DN
19248{
19249 rtx nop;
19250 bool force;
19251 int issue_rate = rs6000_issue_rate ();
19252 bool end = *group_end;
19253 int i;
19254
19255 if (next_insn == NULL_RTX)
19256 return can_issue_more;
19257
19258 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
19259 return can_issue_more;
19260
19261 force = is_costly_group (group_insns, next_insn);
19262 if (!force)
19263 return can_issue_more;
19264
19265 if (sched_verbose > 6)
19266 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 19267 *group_count ,can_issue_more);
cbe26ab8
DN
19268
19269 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
19270 {
19271 if (*group_end)
c4ad648e 19272 can_issue_more = 0;
cbe26ab8
DN
19273
19274 /* Since only a branch can be issued in the last issue_slot, it is
19275 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
19276 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
19277 in this case the last nop will start a new group and the branch
19278 will be forced to the new group. */
cbe26ab8 19279 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 19280 can_issue_more--;
cbe26ab8
DN
19281
19282 while (can_issue_more > 0)
c4ad648e 19283 {
9390387d 19284 nop = gen_nop ();
c4ad648e
AM
19285 emit_insn_before (nop, next_insn);
19286 can_issue_more--;
19287 }
cbe26ab8
DN
19288
19289 *group_end = true;
19290 return 0;
f676971a 19291 }
cbe26ab8
DN
19292
19293 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
19294 {
19295 int n_nops = rs6000_sched_insert_nops;
19296
f676971a 19297 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 19298 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 19299 if (can_issue_more == 0)
c4ad648e 19300 can_issue_more = issue_rate;
cbe26ab8
DN
19301 can_issue_more--;
19302 if (can_issue_more == 0)
c4ad648e
AM
19303 {
19304 can_issue_more = issue_rate - 1;
19305 (*group_count)++;
19306 end = true;
19307 for (i = 0; i < issue_rate; i++)
19308 {
19309 group_insns[i] = 0;
19310 }
19311 }
cbe26ab8
DN
19312
19313 while (n_nops > 0)
c4ad648e
AM
19314 {
19315 nop = gen_nop ();
19316 emit_insn_before (nop, next_insn);
19317 if (can_issue_more == issue_rate - 1) /* new group begins */
19318 end = false;
19319 can_issue_more--;
19320 if (can_issue_more == 0)
19321 {
19322 can_issue_more = issue_rate - 1;
19323 (*group_count)++;
19324 end = true;
19325 for (i = 0; i < issue_rate; i++)
19326 {
19327 group_insns[i] = 0;
19328 }
19329 }
19330 n_nops--;
19331 }
cbe26ab8
DN
19332
19333 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 19334 can_issue_more++;
cbe26ab8 19335
c4ad648e
AM
19336 /* Is next_insn going to start a new group? */
19337 *group_end
19338 = (end
cbe26ab8
DN
19339 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19340 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19341 || (can_issue_more < issue_rate &&
c4ad648e 19342 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19343 if (*group_end && end)
c4ad648e 19344 (*group_count)--;
cbe26ab8
DN
19345
19346 if (sched_verbose > 6)
c4ad648e
AM
19347 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
19348 *group_count, can_issue_more);
f676971a
EC
19349 return can_issue_more;
19350 }
cbe26ab8
DN
19351
19352 return can_issue_more;
19353}
19354
19355/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 19356 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
19357 form in practice. It tries to achieve this synchronization by forcing the
19358 estimated processor grouping on the compiler (as opposed to the function
19359 'pad_groups' which tries to force the scheduler's grouping on the processor).
19360
19361 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
19362 examines the (estimated) dispatch groups that will be formed by the processor
19363 dispatcher. It marks these group boundaries to reflect the estimated
19364 processor grouping, overriding the grouping that the scheduler had marked.
19365 Depending on the value of the flag '-minsert-sched-nops' this function can
19366 force certain insns into separate groups or force a certain distance between
19367 them by inserting nops, for example, if there exists a "costly dependence"
19368 between the insns.
19369
19370 The function estimates the group boundaries that the processor will form as
0fa2e4df 19371 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
19372 each insn. A subsequent insn will start a new group if one of the following
19373 4 cases applies:
19374 - no more vacant issue slots remain in the current dispatch group.
19375 - only the last issue slot, which is the branch slot, is vacant, but the next
19376 insn is not a branch.
19377 - only the last 2 or less issue slots, including the branch slot, are vacant,
19378 which means that a cracked insn (which occupies two issue slots) can't be
19379 issued in this group.
f676971a 19380 - less than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
19381 start a new group. */
19382
19383static int
19384redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19385{
19386 rtx insn, next_insn;
19387 int issue_rate;
19388 int can_issue_more;
19389 int slot, i;
19390 bool group_end;
19391 int group_count = 0;
19392 rtx *group_insns;
19393
19394 /* Initialize. */
19395 issue_rate = rs6000_issue_rate ();
19396 group_insns = alloca (issue_rate * sizeof (rtx));
f676971a 19397 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
19398 {
19399 group_insns[i] = 0;
19400 }
19401 can_issue_more = issue_rate;
19402 slot = 0;
19403 insn = get_next_active_insn (prev_head_insn, tail);
19404 group_end = false;
19405
19406 while (insn != NULL_RTX)
19407 {
19408 slot = (issue_rate - can_issue_more);
19409 group_insns[slot] = insn;
19410 can_issue_more =
c4ad648e 19411 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 19412 if (insn_terminates_group_p (insn, current_group))
c4ad648e 19413 can_issue_more = 0;
cbe26ab8
DN
19414
19415 next_insn = get_next_active_insn (insn, tail);
19416 if (next_insn == NULL_RTX)
c4ad648e 19417 return group_count + 1;
cbe26ab8 19418
c4ad648e
AM
19419 /* Is next_insn going to start a new group? */
19420 group_end
19421 = (can_issue_more == 0
19422 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19423 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19424 || (can_issue_more < issue_rate &&
19425 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19426
f676971a 19427 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
19428 next_insn, &group_end, can_issue_more,
19429 &group_count);
cbe26ab8
DN
19430
19431 if (group_end)
c4ad648e
AM
19432 {
19433 group_count++;
19434 can_issue_more = 0;
19435 for (i = 0; i < issue_rate; i++)
19436 {
19437 group_insns[i] = 0;
19438 }
19439 }
cbe26ab8
DN
19440
19441 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 19442 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 19443 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 19444 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
19445
19446 insn = next_insn;
19447 if (can_issue_more == 0)
c4ad648e
AM
19448 can_issue_more = issue_rate;
19449 } /* while */
cbe26ab8
DN
19450
19451 return group_count;
19452}
19453
19454/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
19455 dispatch group boundaries that the scheduler had marked. Pad with nops
19456 any dispatch groups which have vacant issue slots, in order to force the
19457 scheduler's grouping on the processor dispatcher. The function
19458 returns the number of dispatch groups found. */
19459
19460static int
19461pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19462{
19463 rtx insn, next_insn;
19464 rtx nop;
19465 int issue_rate;
19466 int can_issue_more;
19467 int group_end;
19468 int group_count = 0;
19469
19470 /* Initialize issue_rate. */
19471 issue_rate = rs6000_issue_rate ();
19472 can_issue_more = issue_rate;
19473
19474 insn = get_next_active_insn (prev_head_insn, tail);
19475 next_insn = get_next_active_insn (insn, tail);
19476
19477 while (insn != NULL_RTX)
19478 {
19479 can_issue_more =
19480 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
19481
19482 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
19483
19484 if (next_insn == NULL_RTX)
c4ad648e 19485 break;
cbe26ab8
DN
19486
19487 if (group_end)
c4ad648e
AM
19488 {
19489 /* If the scheduler had marked group termination at this location
19490 (between insn and next_insn), and neither insn nor next_insn will
19491 force group termination, pad the group with nops to force group
19492 termination. */
19493 if (can_issue_more
19494 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
19495 && !insn_terminates_group_p (insn, current_group)
19496 && !insn_terminates_group_p (next_insn, previous_group))
19497 {
9390387d 19498 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
19499 can_issue_more--;
19500
19501 while (can_issue_more)
19502 {
19503 nop = gen_nop ();
19504 emit_insn_before (nop, next_insn);
19505 can_issue_more--;
19506 }
19507 }
19508
19509 can_issue_more = issue_rate;
19510 group_count++;
19511 }
cbe26ab8
DN
19512
19513 insn = next_insn;
19514 next_insn = get_next_active_insn (insn, tail);
19515 }
19516
19517 return group_count;
19518}
19519
44cd321e
PS
19520/* We're beginning a new block. Initialize data structures as necessary. */
19521
19522static void
19523rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
19524 int sched_verbose ATTRIBUTE_UNUSED,
19525 int max_ready ATTRIBUTE_UNUSED)
982afe02 19526{
44cd321e
PS
19527 last_scheduled_insn = NULL_RTX;
19528 load_store_pendulum = 0;
19529}
19530
cbe26ab8
DN
19531/* The following function is called at the end of scheduling BB.
19532 After reload, it inserts nops to enforce insn group bundling. */
19533
19534static void
38f391a5 19535rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
19536{
19537 int n_groups;
19538
19539 if (sched_verbose)
19540 fprintf (dump, "=== Finishing schedule.\n");
19541
ec507f2d 19542 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
19543 {
19544 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 19545 return;
cbe26ab8
DN
19546
19547 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
19548 n_groups = pad_groups (dump, sched_verbose,
19549 current_sched_info->prev_head,
19550 current_sched_info->next_tail);
cbe26ab8 19551 else
c4ad648e
AM
19552 n_groups = redefine_groups (dump, sched_verbose,
19553 current_sched_info->prev_head,
19554 current_sched_info->next_tail);
cbe26ab8
DN
19555
19556 if (sched_verbose >= 6)
19557 {
19558 fprintf (dump, "ngroups = %d\n", n_groups);
19559 print_rtl (dump, current_sched_info->prev_head);
19560 fprintf (dump, "Done finish_sched\n");
19561 }
19562 }
19563}
b6c9286a 19564\f
b6c9286a
MM
19565/* Length in units of the trampoline for entering a nested function. */
19566
19567int
863d938c 19568rs6000_trampoline_size (void)
b6c9286a
MM
19569{
19570 int ret = 0;
19571
19572 switch (DEFAULT_ABI)
19573 {
19574 default:
37409796 19575 gcc_unreachable ();
b6c9286a
MM
19576
19577 case ABI_AIX:
8f802bfb 19578 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
19579 break;
19580
4dabc42d 19581 case ABI_DARWIN:
b6c9286a 19582 case ABI_V4:
03a7e1a5 19583 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 19584 break;
b6c9286a
MM
19585 }
19586
19587 return ret;
19588}
19589
19590/* Emit RTL insns to initialize the variable parts of a trampoline.
19591 FNADDR is an RTX for the address of the function's pure code.
19592 CXT is an RTX for the static chain value for the function. */
19593
19594void
a2369ed3 19595rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 19596{
8bd04c56 19597 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 19598 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
19599
19600 switch (DEFAULT_ABI)
19601 {
19602 default:
37409796 19603 gcc_unreachable ();
b6c9286a 19604
8bd04c56 19605/* Macros to shorten the code expansions below. */
9613eaff 19606#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 19607#define MEM_PLUS(addr,offset) \
9613eaff 19608 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 19609
b6c9286a
MM
19610 /* Under AIX, just build the 3-word function descriptor. */
19611 case ABI_AIX:
8bd04c56 19612 {
9613eaff
SH
19613 rtx fn_reg = gen_reg_rtx (Pmode);
19614 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 19615 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 19616 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
19617 emit_move_insn (MEM_DEREF (addr), fn_reg);
19618 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
19619 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
19620 }
b6c9286a
MM
19621 break;
19622
4dabc42d
TC
19623 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
19624 case ABI_DARWIN:
b6c9286a 19625 case ABI_V4:
9613eaff 19626 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 19627 FALSE, VOIDmode, 4,
9613eaff 19628 addr, Pmode,
eaf1bcf1 19629 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
19630 fnaddr, Pmode,
19631 ctx_reg, Pmode);
b6c9286a 19632 break;
b6c9286a
MM
19633 }
19634
19635 return;
19636}
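/* Layout note (restating the ABI_AIX case above): the trampoline is itself a
   three-word function descriptor.  Word 0 receives the code address taken
   from FNADDR's descriptor, word 1 the TOC pointer, and word 2 the static
   chain CXT; with 4-byte words this accounts for the 12 bytes returned by
   rs6000_trampoline_size for 32-bit code. */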
7509c759
MM
19637
19638\f
91d231cb 19639/* Table of valid machine attributes. */
a4f6c312 19640
91d231cb 19641const struct attribute_spec rs6000_attribute_table[] =
7509c759 19642{
91d231cb 19643 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 19644 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
19645 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
19646 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
19647 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
19648 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
19649#ifdef SUBTARGET_ATTRIBUTE_TABLE
19650 SUBTARGET_ATTRIBUTE_TABLE,
19651#endif
a5c76ee6 19652 { NULL, 0, 0, false, false, false, NULL }
91d231cb 19653};
7509c759 19654
8bb418a3
ZL
19655/* Handle the "altivec" attribute. The attribute may have
19656 arguments as follows:
f676971a 19657
8bb418a3
ZL
19658 __attribute__((altivec(vector__)))
19659 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
19660 __attribute__((altivec(bool__))) (always followed by 'unsigned')
19661
19662 and may appear more than once (e.g., 'vector bool char') in a
19663 given declaration. */
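/* For example (a sketch that assumes the usual front-end expansion of the
   'vector' keyword, which does not happen in this file): a declaration such
   as "vector unsigned int v;" reaches this handler roughly as
   "__attribute__((altivec(vector__))) unsigned int v;" and the 'v'/SImode
   case below maps it to unsigned_V4SI_type_node. */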
19664
19665static tree
f90ac3f0
UP
19666rs6000_handle_altivec_attribute (tree *node,
19667 tree name ATTRIBUTE_UNUSED,
19668 tree args,
8bb418a3
ZL
19669 int flags ATTRIBUTE_UNUSED,
19670 bool *no_add_attrs)
19671{
19672 tree type = *node, result = NULL_TREE;
19673 enum machine_mode mode;
19674 int unsigned_p;
19675 char altivec_type
19676 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
19677 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
19678 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 19679 : '?');
8bb418a3
ZL
19680
19681 while (POINTER_TYPE_P (type)
19682 || TREE_CODE (type) == FUNCTION_TYPE
19683 || TREE_CODE (type) == METHOD_TYPE
19684 || TREE_CODE (type) == ARRAY_TYPE)
19685 type = TREE_TYPE (type);
19686
19687 mode = TYPE_MODE (type);
19688
f90ac3f0
UP
19689 /* Check for invalid AltiVec type qualifiers. */
19690 if (type == long_unsigned_type_node || type == long_integer_type_node)
19691 {
19692 if (TARGET_64BIT)
19693 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
19694 else if (rs6000_warn_altivec_long)
d4ee4d25 19695 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
19696 }
19697 else if (type == long_long_unsigned_type_node
19698 || type == long_long_integer_type_node)
19699 error ("use of %<long long%> in AltiVec types is invalid");
19700 else if (type == double_type_node)
19701 error ("use of %<double%> in AltiVec types is invalid");
19702 else if (type == long_double_type_node)
19703 error ("use of %<long double%> in AltiVec types is invalid");
19704 else if (type == boolean_type_node)
19705 error ("use of boolean types in AltiVec types is invalid");
19706 else if (TREE_CODE (type) == COMPLEX_TYPE)
19707 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
19708 else if (DECIMAL_FLOAT_MODE_P (mode))
19709 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
19710
19711 switch (altivec_type)
19712 {
19713 case 'v':
8df83eae 19714 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
19715 switch (mode)
19716 {
c4ad648e
AM
19717 case SImode:
19718 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
19719 break;
19720 case HImode:
19721 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
19722 break;
19723 case QImode:
19724 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
19725 break;
19726 case SFmode: result = V4SF_type_node; break;
19727 /* If the user says 'vector int bool', we may be handed the 'bool'
19728 attribute _before_ the 'vector' attribute, and so select the
19729 proper type in the 'b' case below. */
19730 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
19731 result = type;
19732 default: break;
8bb418a3
ZL
19733 }
19734 break;
19735 case 'b':
19736 switch (mode)
19737 {
c4ad648e
AM
19738 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
19739 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
19740 case QImode: case V16QImode: result = bool_V16QI_type_node;
19741 default: break;
8bb418a3
ZL
19742 }
19743 break;
19744 case 'p':
19745 switch (mode)
19746 {
c4ad648e
AM
19747 case V8HImode: result = pixel_V8HI_type_node;
19748 default: break;
8bb418a3
ZL
19749 }
19750 default: break;
19751 }
19752
7958a2a6
FJ
19753 if (result && result != type && TYPE_READONLY (type))
19754 result = build_qualified_type (result, TYPE_QUAL_CONST);
19755
8bb418a3
ZL
19756 *no_add_attrs = true; /* No need to hang on to the attribute. */
19757
f90ac3f0 19758 if (result)
8bb418a3
ZL
19759 *node = reconstruct_complex_type (*node, result);
19760
19761 return NULL_TREE;
19762}
19763
f18eca82
ZL
19764/* AltiVec defines four built-in scalar types that serve as vector
19765 elements; we must teach the compiler how to mangle them. */
19766
19767static const char *
3101faab 19768rs6000_mangle_type (const_tree type)
f18eca82 19769{
608063c3
JB
19770 type = TYPE_MAIN_VARIANT (type);
19771
19772 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
19773 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
19774 return NULL;
19775
f18eca82
ZL
19776 if (type == bool_char_type_node) return "U6__boolc";
19777 if (type == bool_short_type_node) return "U6__bools";
19778 if (type == pixel_type_node) return "u7__pixel";
19779 if (type == bool_int_type_node) return "U6__booli";
19780
337bde91
DE
19781 /* Mangle IBM extended float long double as `g' (__float128) on
19782 powerpc*-linux where long-double-64 previously was the default. */
19783 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
19784 && TARGET_ELF
19785 && TARGET_LONG_DOUBLE_128
19786 && !TARGET_IEEEQUAD)
19787 return "g";
19788
f18eca82
ZL
19789 /* For all other types, use normal C++ mangling. */
19790 return NULL;
19791}
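/* For example (illustrative; the enclosing vector type is mangled by the
   generic C++ mangler, not here): the '__pixel' element type contributes
   "u7__pixel" and a 'vector bool int' element type contributes "U6__booli"
   to a mangled signature. */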
19792
a5c76ee6
ZW
19793/* Handle a "longcall" or "shortcall" attribute; arguments as in
19794 struct attribute_spec.handler. */
a4f6c312 19795
91d231cb 19796static tree
f676971a
EC
19797rs6000_handle_longcall_attribute (tree *node, tree name,
19798 tree args ATTRIBUTE_UNUSED,
19799 int flags ATTRIBUTE_UNUSED,
a2369ed3 19800 bool *no_add_attrs)
91d231cb
JM
19801{
19802 if (TREE_CODE (*node) != FUNCTION_TYPE
19803 && TREE_CODE (*node) != FIELD_DECL
19804 && TREE_CODE (*node) != TYPE_DECL)
19805 {
5c498b10 19806 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
19807 IDENTIFIER_POINTER (name));
19808 *no_add_attrs = true;
19809 }
6a4cee5f 19810
91d231cb 19811 return NULL_TREE;
7509c759
MM
19812}
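/* Usage sketch (hypothetical declaration, for illustration only):
   void far_away (void) __attribute__ ((longcall));
   marks the function type so that calls to it go through an indirect
   sequence built with rs6000_longcall_ref below rather than a direct
   branch. */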
19813
a5c76ee6
ZW
19814/* Set longcall attributes on all functions declared when
19815 rs6000_default_long_calls is true. */
19816static void
a2369ed3 19817rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
19818{
19819 if (rs6000_default_long_calls
19820 && (TREE_CODE (type) == FUNCTION_TYPE
19821 || TREE_CODE (type) == METHOD_TYPE))
19822 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
19823 NULL_TREE,
19824 TYPE_ATTRIBUTES (type));
16d6f994
EC
19825
19826#if TARGET_MACHO
19827 darwin_set_default_type_attributes (type);
19828#endif
a5c76ee6
ZW
19829}
19830
3cb999d8
DE
19831/* Return a reference suitable for calling a function with the
19832 longcall attribute. */
a4f6c312 19833
9390387d 19834rtx
a2369ed3 19835rs6000_longcall_ref (rtx call_ref)
6a4cee5f 19836{
d330fd93 19837 const char *call_name;
6a4cee5f
MM
19838 tree node;
19839
19840 if (GET_CODE (call_ref) != SYMBOL_REF)
19841 return call_ref;
19842
19843 /* System V adds '.' to the internal name, so skip them. */
19844 call_name = XSTR (call_ref, 0);
19845 if (*call_name == '.')
19846 {
19847 while (*call_name == '.')
19848 call_name++;
19849
19850 node = get_identifier (call_name);
39403d82 19851 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
19852 }
19853
19854 return force_reg (Pmode, call_ref);
19855}
7509c759 19856\f
77ccdfed
EC
19857#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
19858#define TARGET_USE_MS_BITFIELD_LAYOUT 0
19859#endif
19860
19861/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
19862 struct attribute_spec.handler. */
19863static tree
19864rs6000_handle_struct_attribute (tree *node, tree name,
19865 tree args ATTRIBUTE_UNUSED,
19866 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
19867{
19868 tree *type = NULL;
19869 if (DECL_P (*node))
19870 {
19871 if (TREE_CODE (*node) == TYPE_DECL)
19872 type = &TREE_TYPE (*node);
19873 }
19874 else
19875 type = node;
19876
19877 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
19878 || TREE_CODE (*type) == UNION_TYPE)))
19879 {
19880 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
19881 *no_add_attrs = true;
19882 }
19883
19884 else if ((is_attribute_p ("ms_struct", name)
19885 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
19886 || ((is_attribute_p ("gcc_struct", name)
19887 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
19888 {
19889 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
19890 IDENTIFIER_POINTER (name));
19891 *no_add_attrs = true;
19892 }
19893
19894 return NULL_TREE;
19895}
19896
19897static bool
3101faab 19898rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
19899{
 19900 return ((TARGET_USE_MS_BITFIELD_LAYOUT
 19901	   && !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
 19902	  || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type)));
19903}
19904\f
b64a1b53
RH
19905#ifdef USING_ELFOS_H
19906
d6b5193b 19907/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 19908
d6b5193b
RS
19909static void
19910rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
19911{
19912 if (DEFAULT_ABI == ABI_AIX
19913 && TARGET_MINIMAL_TOC
19914 && !TARGET_RELOCATABLE)
19915 {
19916 if (!toc_initialized)
19917 {
19918 toc_initialized = 1;
19919 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19920 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
19921 fprintf (asm_out_file, "\t.tc ");
19922 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
19923 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19924 fprintf (asm_out_file, "\n");
19925
19926 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19927 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19928 fprintf (asm_out_file, " = .+32768\n");
19929 }
19930 else
19931 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19932 }
19933 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
19934 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19935 else
19936 {
19937 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19938 if (!toc_initialized)
19939 {
19940 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19941 fprintf (asm_out_file, " = .+32768\n");
19942 toc_initialized = 1;
19943 }
19944 }
19945}
19946
19947/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 19948
b64a1b53 19949static void
d6b5193b
RS
19950rs6000_elf_asm_init_sections (void)
19951{
19952 toc_section
19953 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
19954
19955 sdata2_section
19956 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
19957 SDATA2_SECTION_ASM_OP);
19958}
19959
19960/* Implement TARGET_SELECT_RTX_SECTION. */
19961
19962static section *
f676971a 19963rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 19964 unsigned HOST_WIDE_INT align)
7509c759 19965{
a9098fd0 19966 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 19967 return toc_section;
7509c759 19968 else
d6b5193b 19969 return default_elf_select_rtx_section (mode, x, align);
7509c759 19970}
d9407988 19971\f
d1908feb
JJ
19972/* For a SYMBOL_REF, set generic flags and then perform some
19973 target-specific processing.
19974
d1908feb
JJ
19975 When the AIX ABI is requested on a non-AIX system, replace the
19976 function name with the real name (with a leading .) rather than the
19977 function descriptor name. This saves a lot of overriding code to
19978 read the prefixes. */
d9407988 19979
fb49053f 19980static void
a2369ed3 19981rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 19982{
d1908feb 19983 default_encode_section_info (decl, rtl, first);
b2003250 19984
d1908feb
JJ
19985 if (first
19986 && TREE_CODE (decl) == FUNCTION_DECL
19987 && !TARGET_AIX
19988 && DEFAULT_ABI == ABI_AIX)
d9407988 19989 {
c6a2438a 19990 rtx sym_ref = XEXP (rtl, 0);
d1908feb
JJ
19991 size_t len = strlen (XSTR (sym_ref, 0));
19992 char *str = alloca (len + 2);
19993 str[0] = '.';
19994 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
19995 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 19996 }
d9407988
MM
19997}
19998
21d9bb3f
PB
19999static inline bool
20000compare_section_name (const char *section, const char *template)
20001{
20002 int len;
20003
20004 len = strlen (template);
20005 return (strncmp (section, template, len) == 0
20006 && (section[len] == 0 || section[len] == '.'));
20007}
20008
c1b7d95a 20009bool
3101faab 20010rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
20011{
20012 if (rs6000_sdata == SDATA_NONE)
20013 return false;
20014
7482ad25
AF
20015 /* We want to merge strings, so we never consider them small data. */
20016 if (TREE_CODE (decl) == STRING_CST)
20017 return false;
20018
20019 /* Functions are never in the small data area. */
20020 if (TREE_CODE (decl) == FUNCTION_DECL)
20021 return false;
20022
0e5dbd9b
DE
20023 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
20024 {
20025 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
20026 if (compare_section_name (section, ".sdata")
20027 || compare_section_name (section, ".sdata2")
20028 || compare_section_name (section, ".gnu.linkonce.s")
20029 || compare_section_name (section, ".sbss")
20030 || compare_section_name (section, ".sbss2")
20031 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
20032 || strcmp (section, ".PPC.EMB.sdata0") == 0
20033 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
20034 return true;
20035 }
20036 else
20037 {
20038 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
20039
20040 if (size > 0
307b599c 20041 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
20042 /* If it's not public, and we're not going to reference it there,
20043 there's no need to put it in the small data section. */
0e5dbd9b
DE
20044 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
20045 return true;
20046 }
20047
20048 return false;
20049}
20050
b91da81f 20051#endif /* USING_ELFOS_H */
aacd3885
RS
20052\f
20053/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 20054
aacd3885 20055static bool
3101faab 20056rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
20057{
20058 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
20059}
a6c2a102 20060\f
000034eb 20061/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
20062 ADDR can be effectively incremented by incrementing REG.
20063
20064 r0 is special and we must not select it as an address
20065 register by this routine since our caller will try to
20066 increment the returned register via an "la" instruction. */
000034eb 20067
9390387d 20068rtx
a2369ed3 20069find_addr_reg (rtx addr)
000034eb
DE
20070{
20071 while (GET_CODE (addr) == PLUS)
20072 {
02441cd6
JL
20073 if (GET_CODE (XEXP (addr, 0)) == REG
20074 && REGNO (XEXP (addr, 0)) != 0)
000034eb 20075 addr = XEXP (addr, 0);
02441cd6
JL
20076 else if (GET_CODE (XEXP (addr, 1)) == REG
20077 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
20078 addr = XEXP (addr, 1);
20079 else if (CONSTANT_P (XEXP (addr, 0)))
20080 addr = XEXP (addr, 1);
20081 else if (CONSTANT_P (XEXP (addr, 1)))
20082 addr = XEXP (addr, 0);
20083 else
37409796 20084 gcc_unreachable ();
000034eb 20085 }
37409796
NS
20086 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
20087 return addr;
000034eb
DE
20088}
20089
a6c2a102 20090void
a2369ed3 20091rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
20092{
20093 fatal_insn ("bad address", op);
20094}
c8023011 20095
ee890fe2
SS
20096#if TARGET_MACHO
20097
efdba735 20098static tree branch_island_list = 0;
ee890fe2 20099
efdba735
SH
20100/* Remember to generate a branch island for far calls to the given
20101 function. */
ee890fe2 20102
f676971a 20103static void
c4ad648e
AM
20104add_compiler_branch_island (tree label_name, tree function_name,
20105 int line_number)
ee890fe2 20106{
efdba735 20107 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 20108 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
20109 TREE_CHAIN (branch_island) = branch_island_list;
20110 branch_island_list = branch_island;
ee890fe2
SS
20111}
20112
efdba735
SH
20113#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
20114#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
20115#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
20116 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 20117
efdba735
SH
20118/* Generate far-jump branch islands for everything on the
20119 branch_island_list. Invoked immediately after the last instruction
20120 of the epilogue has been emitted; the branch-islands must be
20121 appended to, and contiguous with, the function body. Mach-O stubs
20122 are generated in machopic_output_stub(). */
ee890fe2 20123
efdba735
SH
20124static void
20125macho_branch_islands (void)
20126{
20127 char tmp_buf[512];
20128 tree branch_island;
20129
20130 for (branch_island = branch_island_list;
20131 branch_island;
20132 branch_island = TREE_CHAIN (branch_island))
20133 {
20134 const char *label =
20135 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
20136 const char *name =
11abc112 20137 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
20138 char name_buf[512];
20139 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
20140 if (name[0] == '*' || name[0] == '&')
20141 strcpy (name_buf, name+1);
20142 else
20143 {
20144 name_buf[0] = '_';
20145 strcpy (name_buf+1, name);
20146 }
20147 strcpy (tmp_buf, "\n");
20148 strcat (tmp_buf, label);
ee890fe2 20149#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 20150 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 20151 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 20152#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
20153 if (flag_pic)
20154 {
20155 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
20156 strcat (tmp_buf, label);
20157 strcat (tmp_buf, "_pic\n");
20158 strcat (tmp_buf, label);
20159 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 20160
efdba735
SH
20161 strcat (tmp_buf, "\taddis r11,r11,ha16(");
20162 strcat (tmp_buf, name_buf);
20163 strcat (tmp_buf, " - ");
20164 strcat (tmp_buf, label);
20165 strcat (tmp_buf, "_pic)\n");
f676971a 20166
efdba735 20167 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 20168
efdba735
SH
20169 strcat (tmp_buf, "\taddi r12,r11,lo16(");
20170 strcat (tmp_buf, name_buf);
20171 strcat (tmp_buf, " - ");
20172 strcat (tmp_buf, label);
20173 strcat (tmp_buf, "_pic)\n");
f676971a 20174
efdba735
SH
20175 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
20176 }
20177 else
20178 {
20179 strcat (tmp_buf, ":\nlis r12,hi16(");
20180 strcat (tmp_buf, name_buf);
20181 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
20182 strcat (tmp_buf, name_buf);
20183 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
20184 }
20185 output_asm_insn (tmp_buf, 0);
ee890fe2 20186#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 20187 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 20188 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 20189#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 20190 }
ee890fe2 20191
efdba735 20192 branch_island_list = 0;
ee890fe2
SS
20193}
20194
 20195/* NO_PREVIOUS_DEF checks in the linked list whether the function name is
20196 already there or not. */
20197
efdba735 20198static int
a2369ed3 20199no_previous_def (tree function_name)
ee890fe2 20200{
efdba735
SH
20201 tree branch_island;
20202 for (branch_island = branch_island_list;
20203 branch_island;
20204 branch_island = TREE_CHAIN (branch_island))
20205 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
20206 return 0;
20207 return 1;
20208}
20209
20210/* GET_PREV_LABEL gets the label name from the previous definition of
20211 the function. */
20212
efdba735 20213static tree
a2369ed3 20214get_prev_label (tree function_name)
ee890fe2 20215{
efdba735
SH
20216 tree branch_island;
20217 for (branch_island = branch_island_list;
20218 branch_island;
20219 branch_island = TREE_CHAIN (branch_island))
20220 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
20221 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
20222 return 0;
20223}
20224
75b1b789
MS
20225#ifndef DARWIN_LINKER_GENERATES_ISLANDS
20226#define DARWIN_LINKER_GENERATES_ISLANDS 0
20227#endif
20228
20229/* KEXTs still need branch islands. */
20230#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
20231 || flag_mkernel || flag_apple_kext)
20232
ee890fe2 20233/* INSN is either a function call or a millicode call. It may have an
f676971a 20234 unconditional jump in its delay slot.
ee890fe2
SS
20235
20236 CALL_DEST is the routine we are calling. */
20237
20238char *
c4ad648e
AM
20239output_call (rtx insn, rtx *operands, int dest_operand_number,
20240 int cookie_operand_number)
ee890fe2
SS
20241{
20242 static char buf[256];
75b1b789
MS
20243 if (DARWIN_GENERATE_ISLANDS
20244 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 20245 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
20246 {
20247 tree labelname;
efdba735 20248 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 20249
ee890fe2
SS
20250 if (no_previous_def (funname))
20251 {
ee890fe2
SS
20252 rtx label_rtx = gen_label_rtx ();
20253 char *label_buf, temp_buf[256];
20254 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
20255 CODE_LABEL_NUMBER (label_rtx));
20256 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
20257 labelname = get_identifier (label_buf);
a38e7aa5 20258 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
20259 }
20260 else
20261 labelname = get_prev_label (funname);
20262
efdba735
SH
20263 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
20264 instruction will reach 'foo', otherwise link as 'bl L42'".
20265 "L42" should be a 'branch island', that will do a far jump to
20266 'foo'. Branch islands are generated in
20267 macho_branch_islands(). */
ee890fe2 20268 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 20269 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
20270 }
20271 else
efdba735
SH
20272 sprintf (buf, "bl %%z%d", dest_operand_number);
20273 return buf;
ee890fe2
SS
20274}
20275
ee890fe2
SS
20276/* Generate PIC and indirect symbol stubs. */
20277
20278void
a2369ed3 20279machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
20280{
20281 unsigned int length;
a4f6c312
SS
20282 char *symbol_name, *lazy_ptr_name;
20283 char *local_label_0;
ee890fe2
SS
20284 static int label = 0;
20285
df56a27f 20286 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 20287 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 20288
ee890fe2 20289
ee890fe2
SS
20290 length = strlen (symb);
20291 symbol_name = alloca (length + 32);
20292 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
20293
20294 lazy_ptr_name = alloca (length + 32);
20295 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
20296
ee890fe2 20297 if (flag_pic == 2)
56c779bc 20298 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 20299 else
56c779bc 20300 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
20301
20302 if (flag_pic == 2)
20303 {
d974312d
DJ
20304 fprintf (file, "\t.align 5\n");
20305
20306 fprintf (file, "%s:\n", stub);
20307 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20308
876455fa 20309 label++;
89da1f32 20310 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
876455fa 20311 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 20312
ee890fe2
SS
20313 fprintf (file, "\tmflr r0\n");
20314 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
20315 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
20316 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
20317 lazy_ptr_name, local_label_0);
20318 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
20319 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
20320 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
20321 lazy_ptr_name, local_label_0);
20322 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
20323 fprintf (file, "\tbctr\n");
20324 }
20325 else
d974312d
DJ
20326 {
20327 fprintf (file, "\t.align 4\n");
20328
20329 fprintf (file, "%s:\n", stub);
20330 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20331
20332 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
20333 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
20334 (TARGET_64BIT ? "ldu" : "lwzu"),
20335 lazy_ptr_name);
d974312d
DJ
20336 fprintf (file, "\tmtctr r12\n");
20337 fprintf (file, "\tbctr\n");
20338 }
f676971a 20339
56c779bc 20340 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
20341 fprintf (file, "%s:\n", lazy_ptr_name);
20342 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
20343 fprintf (file, "%sdyld_stub_binding_helper\n",
20344 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
20345}
20346
20347/* Legitimize PIC addresses. If the address is already
20348 position-independent, we return ORIG. Newly generated
 20349 position-independent addresses go into a reg. This is REG if
 20350 nonzero; otherwise we allocate register(s) as necessary. */
20351
4fbbe694 20352#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
20353
20354rtx
f676971a 20355rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 20356 rtx reg)
ee890fe2
SS
20357{
20358 rtx base, offset;
20359
20360 if (reg == NULL && ! reload_in_progress && ! reload_completed)
20361 reg = gen_reg_rtx (Pmode);
20362
20363 if (GET_CODE (orig) == CONST)
20364 {
37409796
NS
20365 rtx reg_temp;
20366
ee890fe2
SS
20367 if (GET_CODE (XEXP (orig, 0)) == PLUS
20368 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
20369 return orig;
20370
37409796 20371 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 20372
37409796
NS
20373 /* Use a different reg for the intermediate value, as
20374 it will be marked UNCHANGING. */
b3a13419 20375 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
20376 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
20377 Pmode, reg_temp);
20378 offset =
20379 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
20380 Pmode, reg);
bb8df8a6 20381
ee890fe2
SS
20382 if (GET_CODE (offset) == CONST_INT)
20383 {
20384 if (SMALL_INT (offset))
ed8908e7 20385 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
20386 else if (! reload_in_progress && ! reload_completed)
20387 offset = force_reg (Pmode, offset);
20388 else
c859cda6
DJ
20389 {
20390 rtx mem = force_const_mem (Pmode, orig);
20391 return machopic_legitimize_pic_address (mem, Pmode, reg);
20392 }
ee890fe2 20393 }
f1c25d3b 20394 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
20395 }
20396
20397 /* Fall back on generic machopic code. */
20398 return machopic_legitimize_pic_address (orig, mode, reg);
20399}
20400
c4e18b1c
GK
20401/* Output a .machine directive for the Darwin assembler, and call
20402 the generic start_file routine. */
20403
20404static void
20405rs6000_darwin_file_start (void)
20406{
94ff898d 20407 static const struct
c4e18b1c
GK
20408 {
20409 const char *arg;
20410 const char *name;
20411 int if_set;
20412 } mapping[] = {
55dbfb48 20413 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
20414 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
20415 { "power4", "ppc970", 0 },
20416 { "G5", "ppc970", 0 },
20417 { "7450", "ppc7450", 0 },
20418 { "7400", "ppc7400", MASK_ALTIVEC },
20419 { "G4", "ppc7400", 0 },
20420 { "750", "ppc750", 0 },
20421 { "740", "ppc750", 0 },
20422 { "G3", "ppc750", 0 },
20423 { "604e", "ppc604e", 0 },
20424 { "604", "ppc604", 0 },
20425 { "603e", "ppc603", 0 },
20426 { "603", "ppc603", 0 },
20427 { "601", "ppc601", 0 },
20428 { NULL, "ppc", 0 } };
20429 const char *cpu_id = "";
20430 size_t i;
94ff898d 20431
9390387d 20432 rs6000_file_start ();
192d0f89 20433 darwin_file_start ();
c4e18b1c
GK
20434
20435 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
20436 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
20437 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
20438 && rs6000_select[i].string[0] != '\0')
20439 cpu_id = rs6000_select[i].string;
20440
20441 /* Look through the mapping array. Pick the first name that either
20442 matches the argument, has a bit set in IF_SET that is also set
20443 in the target flags, or has a NULL name. */
20444
20445 i = 0;
20446 while (mapping[i].arg != NULL
20447 && strcmp (mapping[i].arg, cpu_id) != 0
20448 && (mapping[i].if_set & target_flags) == 0)
20449 i++;
20450
20451 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
20452}
20453
ee890fe2 20454#endif /* TARGET_MACHO */
7c262518
RH
20455
20456#if TARGET_ELF
9b580a0b
RH
20457static int
20458rs6000_elf_reloc_rw_mask (void)
7c262518 20459{
9b580a0b
RH
20460 if (flag_pic)
20461 return 3;
20462 else if (DEFAULT_ABI == ABI_AIX)
20463 return 2;
20464 else
20465 return 0;
7c262518 20466}
d9f6800d
RH
20467
20468/* Record an element in the table of global constructors. SYMBOL is
20469 a SYMBOL_REF of the function to be called; PRIORITY is a number
20470 between 0 and MAX_INIT_PRIORITY.
20471
20472 This differs from default_named_section_asm_out_constructor in
20473 that we have special handling for -mrelocatable. */
20474
20475static void
a2369ed3 20476rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
20477{
20478 const char *section = ".ctors";
20479 char buf[16];
20480
20481 if (priority != DEFAULT_INIT_PRIORITY)
20482 {
20483 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
20484 /* Invert the numbering so the linker puts us in the proper
20485 order; constructors are run from right to left, and the
20486 linker sorts in increasing order. */
20487 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20488 section = buf;
20489 }
20490
d6b5193b 20491 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20492 assemble_align (POINTER_SIZE);
d9f6800d
RH
20493
20494 if (TARGET_RELOCATABLE)
20495 {
20496 fputs ("\t.long (", asm_out_file);
20497 output_addr_const (asm_out_file, symbol);
20498 fputs (")@fixup\n", asm_out_file);
20499 }
20500 else
c8af3574 20501 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
20502}
20503
20504static void
a2369ed3 20505rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
20506{
20507 const char *section = ".dtors";
20508 char buf[16];
20509
20510 if (priority != DEFAULT_INIT_PRIORITY)
20511 {
20512 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
20513 /* Invert the numbering so the linker puts us in the proper
20514 order; constructors are run from right to left, and the
20515 linker sorts in increasing order. */
20516 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20517 section = buf;
20518 }
20519
d6b5193b 20520 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20521 assemble_align (POINTER_SIZE);
d9f6800d
RH
20522
20523 if (TARGET_RELOCATABLE)
20524 {
20525 fputs ("\t.long (", asm_out_file);
20526 output_addr_const (asm_out_file, symbol);
20527 fputs (")@fixup\n", asm_out_file);
20528 }
20529 else
c8af3574 20530 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 20531}
9739c90c
JJ
20532
20533void
a2369ed3 20534rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
20535{
20536 if (TARGET_64BIT)
20537 {
20538 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
20539 ASM_OUTPUT_LABEL (file, name);
20540 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
20541 rs6000_output_function_entry (file, name);
20542 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
20543 if (DOT_SYMBOLS)
9739c90c 20544 {
85b776df 20545 fputs ("\t.size\t", file);
9739c90c 20546 assemble_name (file, name);
85b776df
AM
20547 fputs (",24\n\t.type\t.", file);
20548 assemble_name (file, name);
20549 fputs (",@function\n", file);
20550 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
20551 {
20552 fputs ("\t.globl\t.", file);
20553 assemble_name (file, name);
20554 putc ('\n', file);
20555 }
9739c90c 20556 }
85b776df
AM
20557 else
20558 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 20559 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
20560 rs6000_output_function_entry (file, name);
20561 fputs (":\n", file);
9739c90c
JJ
20562 return;
20563 }
20564
20565 if (TARGET_RELOCATABLE
7f970b70 20566 && !TARGET_SECURE_PLT
9739c90c 20567 && (get_pool_size () != 0 || current_function_profile)
3c9eb5f4 20568 && uses_TOC ())
9739c90c
JJ
20569 {
20570 char buf[256];
20571
20572 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
20573
20574 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
20575 fprintf (file, "\t.long ");
20576 assemble_name (file, buf);
20577 putc ('-', file);
20578 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
20579 assemble_name (file, buf);
20580 putc ('\n', file);
20581 }
20582
20583 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
20584 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
20585
20586 if (DEFAULT_ABI == ABI_AIX)
20587 {
20588 const char *desc_name, *orig_name;
20589
20590 orig_name = (*targetm.strip_name_encoding) (name);
20591 desc_name = orig_name;
20592 while (*desc_name == '.')
20593 desc_name++;
20594
20595 if (TREE_PUBLIC (decl))
20596 fprintf (file, "\t.globl %s\n", desc_name);
20597
20598 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20599 fprintf (file, "%s:\n", desc_name);
20600 fprintf (file, "\t.long %s\n", orig_name);
20601 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
20602 if (DEFAULT_ABI == ABI_AIX)
20603 fputs ("\t.long 0\n", file);
20604 fprintf (file, "\t.previous\n");
20605 }
20606 ASM_OUTPUT_LABEL (file, name);
20607}
1334b570
AM
20608
20609static void
20610rs6000_elf_end_indicate_exec_stack (void)
20611{
20612 if (TARGET_32BIT)
20613 file_end_indicate_exec_stack ();
20614}
7c262518
RH
20615#endif
20616
cbaaba19 20617#if TARGET_XCOFF
0d5817b2
DE
20618static void
20619rs6000_xcoff_asm_output_anchor (rtx symbol)
20620{
20621 char buffer[100];
20622
20623 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
20624 SYMBOL_REF_BLOCK_OFFSET (symbol));
20625 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
20626}
20627
7c262518 20628static void
a2369ed3 20629rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
20630{
20631 fputs (GLOBAL_ASM_OP, stream);
20632 RS6000_OUTPUT_BASENAME (stream, name);
20633 putc ('\n', stream);
20634}
20635
d6b5193b
RS
 20636/* A get_unnamed_section callback, used for read-only sections. PTR
20637 points to the section string variable. */
20638
20639static void
20640rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
20641{
890f9edf
OH
20642 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
20643 *(const char *const *) directive,
20644 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20645}
20646
20647/* Likewise for read-write sections. */
20648
20649static void
20650rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
20651{
890f9edf
OH
20652 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
20653 *(const char *const *) directive,
20654 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20655}
20656
20657/* A get_unnamed_section callback, used for switching to toc_section. */
20658
20659static void
20660rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20661{
20662 if (TARGET_MINIMAL_TOC)
20663 {
20664 /* toc_section is always selected at least once from
20665 rs6000_xcoff_file_start, so this is guaranteed to
20666 always be defined once and only once in each file. */
20667 if (!toc_initialized)
20668 {
20669 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
20670 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
20671 toc_initialized = 1;
20672 }
20673 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
20674 (TARGET_32BIT ? "" : ",3"));
20675 }
20676 else
20677 fputs ("\t.toc\n", asm_out_file);
20678}
20679
20680/* Implement TARGET_ASM_INIT_SECTIONS. */
20681
20682static void
20683rs6000_xcoff_asm_init_sections (void)
20684{
20685 read_only_data_section
20686 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20687 &xcoff_read_only_section_name);
20688
20689 private_data_section
20690 = get_unnamed_section (SECTION_WRITE,
20691 rs6000_xcoff_output_readwrite_section_asm_op,
20692 &xcoff_private_data_section_name);
20693
20694 read_only_private_data_section
20695 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20696 &xcoff_private_data_section_name);
20697
20698 toc_section
20699 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
20700
20701 readonly_data_section = read_only_data_section;
20702 exception_section = data_section;
20703}
20704
9b580a0b
RH
20705static int
20706rs6000_xcoff_reloc_rw_mask (void)
20707{
20708 return 3;
20709}
20710
b275d088 20711static void
c18a5b6c
MM
20712rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
20713 tree decl ATTRIBUTE_UNUSED)
7c262518 20714{
0e5dbd9b
DE
20715 int smclass;
20716 static const char * const suffix[3] = { "PR", "RO", "RW" };
20717
20718 if (flags & SECTION_CODE)
20719 smclass = 0;
20720 else if (flags & SECTION_WRITE)
20721 smclass = 2;
20722 else
20723 smclass = 1;
20724
5b5198f7 20725 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 20726 (flags & SECTION_CODE) ? "." : "",
5b5198f7 20727 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 20728}
ae46c4e0 20729
d6b5193b 20730static section *
f676971a 20731rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 20732 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 20733{
9b580a0b 20734 if (decl_readonly_section (decl, reloc))
ae46c4e0 20735 {
0e5dbd9b 20736 if (TREE_PUBLIC (decl))
d6b5193b 20737 return read_only_data_section;
ae46c4e0 20738 else
d6b5193b 20739 return read_only_private_data_section;
ae46c4e0
RH
20740 }
20741 else
20742 {
0e5dbd9b 20743 if (TREE_PUBLIC (decl))
d6b5193b 20744 return data_section;
ae46c4e0 20745 else
d6b5193b 20746 return private_data_section;
ae46c4e0
RH
20747 }
20748}
20749
20750static void
a2369ed3 20751rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
20752{
20753 const char *name;
ae46c4e0 20754
5b5198f7
DE
20755 /* Use select_section for private and uninitialized data. */
20756 if (!TREE_PUBLIC (decl)
20757 || DECL_COMMON (decl)
0e5dbd9b
DE
20758 || DECL_INITIAL (decl) == NULL_TREE
20759 || DECL_INITIAL (decl) == error_mark_node
20760 || (flag_zero_initialized_in_bss
20761 && initializer_zerop (DECL_INITIAL (decl))))
20762 return;
20763
20764 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
20765 name = (*targetm.strip_name_encoding) (name);
20766 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 20767}
b64a1b53 20768
fb49053f
RH
20769/* Select section for constant in constant pool.
20770
20771 On RS/6000, all constants are in the private read-only data area.
20772 However, if this is being placed in the TOC it must be output as a
20773 toc entry. */
20774
d6b5193b 20775static section *
f676971a 20776rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 20777 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
20778{
20779 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20780 return toc_section;
b64a1b53 20781 else
d6b5193b 20782 return read_only_private_data_section;
b64a1b53 20783}
772c5265
RH
20784
20785/* Remove any trailing [DS] or the like from the symbol name. */
20786
20787static const char *
a2369ed3 20788rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
20789{
20790 size_t len;
20791 if (*name == '*')
20792 name++;
20793 len = strlen (name);
20794 if (name[len - 1] == ']')
20795 return ggc_alloc_string (name, len - 4);
20796 else
20797 return name;
20798}
20799
5add3202
DE
20800/* Section attributes. AIX is always PIC. */
20801
20802static unsigned int
a2369ed3 20803rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 20804{
5b5198f7 20805 unsigned int align;
9b580a0b 20806 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
20807
20808 /* Align to at least UNIT size. */
20809 if (flags & SECTION_CODE)
20810 align = MIN_UNITS_PER_WORD;
20811 else
20812 /* Increase alignment of large objects if not already stricter. */
20813 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
20814 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
20815 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
20816
20817 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 20818}
a5fe455b 20819
1bc7c5b6
ZW
20820/* Output at beginning of assembler file.
20821
20822 Initialize the section names for the RS/6000 at this point.
20823
20824 Specify filename, including full path, to assembler.
20825
20826 We want to go into the TOC section so at least one .toc will be emitted.
20827 Also, in order to output proper .bs/.es pairs, we need at least one static
20828 [RW] section emitted.
20829
20830 Finally, declare mcount when profiling to make the assembler happy. */
20831
20832static void
863d938c 20833rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
20834{
20835 rs6000_gen_section_name (&xcoff_bss_section_name,
20836 main_input_filename, ".bss_");
20837 rs6000_gen_section_name (&xcoff_private_data_section_name,
20838 main_input_filename, ".rw_");
20839 rs6000_gen_section_name (&xcoff_read_only_section_name,
20840 main_input_filename, ".ro_");
20841
20842 fputs ("\t.file\t", asm_out_file);
20843 output_quoted_string (asm_out_file, main_input_filename);
20844 fputc ('\n', asm_out_file);
1bc7c5b6 20845 if (write_symbols != NO_DEBUG)
d6b5193b
RS
20846 switch_to_section (private_data_section);
20847 switch_to_section (text_section);
1bc7c5b6
ZW
20848 if (profile_flag)
20849 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
20850 rs6000_file_start ();
20851}
20852
a5fe455b
ZW
20853/* Output at end of assembler file.
20854 On the RS/6000, referencing data should automatically pull in text. */
20855
20856static void
863d938c 20857rs6000_xcoff_file_end (void)
a5fe455b 20858{
d6b5193b 20859 switch_to_section (text_section);
a5fe455b 20860 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 20861 switch_to_section (data_section);
a5fe455b
ZW
20862 fputs (TARGET_32BIT
20863 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
20864 asm_out_file);
20865}
f1384257 20866#endif /* TARGET_XCOFF */
0e5dbd9b 20867
3c50106f
RH
20868/* Compute a (partial) cost for rtx X. Return true if the complete
20869 cost has been computed, and false if subexpressions should be
20870 scanned. In either case, *TOTAL contains the cost result. */
20871
20872static bool
1494c534 20873rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 20874{
f0517163
RS
20875 enum machine_mode mode = GET_MODE (x);
20876
3c50106f
RH
20877 switch (code)
20878 {
30a555d9 20879 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 20880 case CONST_INT:
066cd967
DE
20881 if (((outer_code == SET
20882 || outer_code == PLUS
20883 || outer_code == MINUS)
279bb624
DE
20884 && (satisfies_constraint_I (x)
20885 || satisfies_constraint_L (x)))
066cd967 20886 || (outer_code == AND
279bb624
DE
20887 && (satisfies_constraint_K (x)
20888 || (mode == SImode
20889 ? satisfies_constraint_L (x)
20890 : satisfies_constraint_J (x))
1990cd79
AM
20891 || mask_operand (x, mode)
20892 || (mode == DImode
20893 && mask64_operand (x, DImode))))
22e54023 20894 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
20895 && (satisfies_constraint_K (x)
20896 || (mode == SImode
20897 ? satisfies_constraint_L (x)
20898 : satisfies_constraint_J (x))))
066cd967
DE
20899 || outer_code == ASHIFT
20900 || outer_code == ASHIFTRT
20901 || outer_code == LSHIFTRT
20902 || outer_code == ROTATE
20903 || outer_code == ROTATERT
d5861a7a 20904 || outer_code == ZERO_EXTRACT
066cd967 20905 || (outer_code == MULT
279bb624 20906 && satisfies_constraint_I (x))
22e54023
DE
20907 || ((outer_code == DIV || outer_code == UDIV
20908 || outer_code == MOD || outer_code == UMOD)
20909 && exact_log2 (INTVAL (x)) >= 0)
066cd967 20910 || (outer_code == COMPARE
279bb624
DE
20911 && (satisfies_constraint_I (x)
20912 || satisfies_constraint_K (x)))
22e54023 20913 || (outer_code == EQ
279bb624
DE
20914 && (satisfies_constraint_I (x)
20915 || satisfies_constraint_K (x)
20916 || (mode == SImode
20917 ? satisfies_constraint_L (x)
20918 : satisfies_constraint_J (x))))
22e54023 20919 || (outer_code == GTU
279bb624 20920 && satisfies_constraint_I (x))
22e54023 20921 || (outer_code == LTU
279bb624 20922 && satisfies_constraint_P (x)))
066cd967
DE
20923 {
20924 *total = 0;
20925 return true;
20926 }
20927 else if ((outer_code == PLUS
4ae234b0 20928 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 20929 || (outer_code == MINUS
4ae234b0 20930 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
20931 || ((outer_code == SET
20932 || outer_code == IOR
20933 || outer_code == XOR)
20934 && (INTVAL (x)
20935 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
20936 {
20937 *total = COSTS_N_INSNS (1);
20938 return true;
20939 }
20940 /* FALLTHRU */
20941
20942 case CONST_DOUBLE:
f6fe3a22 20943 if (mode == DImode && code == CONST_DOUBLE)
066cd967 20944 {
f6fe3a22
DE
20945 if ((outer_code == IOR || outer_code == XOR)
20946 && CONST_DOUBLE_HIGH (x) == 0
20947 && (CONST_DOUBLE_LOW (x)
20948 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
20949 {
20950 *total = 0;
20951 return true;
20952 }
20953 else if ((outer_code == AND && and64_2_operand (x, DImode))
20954 || ((outer_code == SET
20955 || outer_code == IOR
20956 || outer_code == XOR)
20957 && CONST_DOUBLE_HIGH (x) == 0))
20958 {
20959 *total = COSTS_N_INSNS (1);
20960 return true;
20961 }
066cd967
DE
20962 }
20963 /* FALLTHRU */
20964
3c50106f 20965 case CONST:
066cd967 20966 case HIGH:
3c50106f 20967 case SYMBOL_REF:
066cd967
DE
20968 case MEM:
20969 /* When optimizing for size, MEM should be slightly more expensive
20970 than generating address, e.g., (plus (reg) (const)).
c112cf2b 20971 L1 cache latency is about two instructions. */
066cd967 20972 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
20973 return true;
20974
30a555d9
DE
20975 case LABEL_REF:
20976 *total = 0;
20977 return true;
20978
3c50106f 20979 case PLUS:
f0517163 20980 if (mode == DFmode)
066cd967
DE
20981 {
20982 if (GET_CODE (XEXP (x, 0)) == MULT)
20983 {
20984 /* FNMA accounted in outer NEG. */
20985 if (outer_code == NEG)
20986 *total = rs6000_cost->dmul - rs6000_cost->fp;
20987 else
20988 *total = rs6000_cost->dmul;
20989 }
20990 else
20991 *total = rs6000_cost->fp;
20992 }
f0517163 20993 else if (mode == SFmode)
066cd967
DE
20994 {
20995 /* FNMA accounted in outer NEG. */
20996 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20997 *total = 0;
20998 else
20999 *total = rs6000_cost->fp;
21000 }
f0517163 21001 else
066cd967
DE
21002 *total = COSTS_N_INSNS (1);
21003 return false;
3c50106f 21004
52190329 21005 case MINUS:
f0517163 21006 if (mode == DFmode)
066cd967 21007 {
762c919f
JM
21008 if (GET_CODE (XEXP (x, 0)) == MULT
21009 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
21010 {
21011 /* FNMA accounted in outer NEG. */
21012 if (outer_code == NEG)
762c919f 21013 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
21014 else
21015 *total = rs6000_cost->dmul;
21016 }
21017 else
21018 *total = rs6000_cost->fp;
21019 }
f0517163 21020 else if (mode == SFmode)
066cd967
DE
21021 {
21022 /* FNMA accounted in outer NEG. */
21023 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21024 *total = 0;
21025 else
21026 *total = rs6000_cost->fp;
21027 }
f0517163 21028 else
c4ad648e 21029 *total = COSTS_N_INSNS (1);
066cd967 21030 return false;
3c50106f
RH
21031
21032 case MULT:
c9dbf840 21033 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 21034 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 21035 {
8b897cfa
RS
21036 if (INTVAL (XEXP (x, 1)) >= -256
21037 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 21038 *total = rs6000_cost->mulsi_const9;
8b897cfa 21039 else
06a67bdd 21040 *total = rs6000_cost->mulsi_const;
3c50106f 21041 }
066cd967
DE
21042 /* FMA accounted in outer PLUS/MINUS. */
21043 else if ((mode == DFmode || mode == SFmode)
21044 && (outer_code == PLUS || outer_code == MINUS))
21045 *total = 0;
f0517163 21046 else if (mode == DFmode)
06a67bdd 21047 *total = rs6000_cost->dmul;
f0517163 21048 else if (mode == SFmode)
06a67bdd 21049 *total = rs6000_cost->fp;
f0517163 21050 else if (mode == DImode)
06a67bdd 21051 *total = rs6000_cost->muldi;
8b897cfa 21052 else
06a67bdd 21053 *total = rs6000_cost->mulsi;
066cd967 21054 return false;
3c50106f
RH
21055
21056 case DIV:
21057 case MOD:
f0517163
RS
21058 if (FLOAT_MODE_P (mode))
21059 {
06a67bdd
RS
21060 *total = mode == DFmode ? rs6000_cost->ddiv
21061 : rs6000_cost->sdiv;
066cd967 21062 return false;
f0517163 21063 }
5efb1046 21064 /* FALLTHRU */
3c50106f
RH
21065
21066 case UDIV:
21067 case UMOD:
627b6fe2
DJ
21068 if (GET_CODE (XEXP (x, 1)) == CONST_INT
21069 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
21070 {
21071 if (code == DIV || code == MOD)
21072 /* Shift, addze */
21073 *total = COSTS_N_INSNS (2);
21074 else
21075 /* Shift */
21076 *total = COSTS_N_INSNS (1);
21077 }
c4ad648e 21078 else
627b6fe2
DJ
21079 {
21080 if (GET_MODE (XEXP (x, 1)) == DImode)
21081 *total = rs6000_cost->divdi;
21082 else
21083 *total = rs6000_cost->divsi;
21084 }
21085 /* Add in shift and subtract for MOD. */
21086 if (code == MOD || code == UMOD)
21087 *total += COSTS_N_INSNS (2);
066cd967 21088 return false;
3c50106f 21089
32f56aad 21090 case CTZ:
3c50106f
RH
21091 case FFS:
21092 *total = COSTS_N_INSNS (4);
066cd967 21093 return false;
3c50106f 21094
32f56aad
DE
21095 case POPCOUNT:
21096 *total = COSTS_N_INSNS (6);
21097 return false;
21098
06a67bdd 21099 case NOT:
066cd967
DE
21100 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
21101 {
21102 *total = 0;
21103 return false;
21104 }
21105 /* FALLTHRU */
21106
21107 case AND:
32f56aad 21108 case CLZ:
066cd967
DE
21109 case IOR:
21110 case XOR:
d5861a7a
DE
21111 case ZERO_EXTRACT:
21112 *total = COSTS_N_INSNS (1);
21113 return false;
21114
066cd967
DE
21115 case ASHIFT:
21116 case ASHIFTRT:
21117 case LSHIFTRT:
21118 case ROTATE:
21119 case ROTATERT:
d5861a7a 21120 /* Handle mul_highpart. */
066cd967
DE
21121 if (outer_code == TRUNCATE
21122 && GET_CODE (XEXP (x, 0)) == MULT)
21123 {
21124 if (mode == DImode)
21125 *total = rs6000_cost->muldi;
21126 else
21127 *total = rs6000_cost->mulsi;
21128 return true;
21129 }
d5861a7a
DE
21130 else if (outer_code == AND)
21131 *total = 0;
21132 else
21133 *total = COSTS_N_INSNS (1);
21134 return false;
21135
21136 case SIGN_EXTEND:
21137 case ZERO_EXTEND:
21138 if (GET_CODE (XEXP (x, 0)) == MEM)
21139 *total = 0;
21140 else
21141 *total = COSTS_N_INSNS (1);
066cd967 21142 return false;
06a67bdd 21143
066cd967
DE
21144 case COMPARE:
21145 case NEG:
21146 case ABS:
21147 if (!FLOAT_MODE_P (mode))
21148 {
21149 *total = COSTS_N_INSNS (1);
21150 return false;
21151 }
21152 /* FALLTHRU */
21153
21154 case FLOAT:
21155 case UNSIGNED_FLOAT:
21156 case FIX:
21157 case UNSIGNED_FIX:
06a67bdd
RS
21158 case FLOAT_TRUNCATE:
21159 *total = rs6000_cost->fp;
066cd967 21160 return false;
06a67bdd 21161
a2af5043
DJ
21162 case FLOAT_EXTEND:
21163 if (mode == DFmode)
21164 *total = 0;
21165 else
21166 *total = rs6000_cost->fp;
21167 return false;
21168
06a67bdd
RS
21169 case UNSPEC:
21170 switch (XINT (x, 1))
21171 {
21172 case UNSPEC_FRSP:
21173 *total = rs6000_cost->fp;
21174 return true;
21175
21176 default:
21177 break;
21178 }
21179 break;
21180
21181 case CALL:
21182 case IF_THEN_ELSE:
21183 if (optimize_size)
21184 {
21185 *total = COSTS_N_INSNS (1);
21186 return true;
21187 }
066cd967
DE
21188 else if (FLOAT_MODE_P (mode)
21189 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
21190 {
21191 *total = rs6000_cost->fp;
21192 return false;
21193 }
06a67bdd
RS
21194 break;
21195
c0600ecd
DE
21196 case EQ:
21197 case GTU:
21198 case LTU:
22e54023
DE
21199 /* Carry bit requires mode == Pmode.
21200 NEG or PLUS already counted so only add one. */
21201 if (mode == Pmode
21202 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 21203 {
22e54023
DE
21204 *total = COSTS_N_INSNS (1);
21205 return true;
21206 }
21207 if (outer_code == SET)
21208 {
21209 if (XEXP (x, 1) == const0_rtx)
c0600ecd 21210 {
22e54023 21211 *total = COSTS_N_INSNS (2);
c0600ecd 21212 return true;
c0600ecd 21213 }
22e54023
DE
21214 else if (mode == Pmode)
21215 {
21216 *total = COSTS_N_INSNS (3);
21217 return false;
21218 }
21219 }
21220 /* FALLTHRU */
21221
21222 case GT:
21223 case LT:
21224 case UNORDERED:
21225 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
21226 {
21227 *total = COSTS_N_INSNS (2);
21228 return true;
c0600ecd 21229 }
22e54023
DE
21230 /* CC COMPARE. */
21231 if (outer_code == COMPARE)
21232 {
21233 *total = 0;
21234 return true;
21235 }
21236 break;
c0600ecd 21237
3c50106f 21238 default:
06a67bdd 21239 break;
3c50106f 21240 }
06a67bdd
RS
21241
21242 return false;
3c50106f
RH
21243}
21244
34bb030a
DE
21245/* A C expression returning the cost of moving data from a register of class
21246 CLASS1 to one of CLASS2. */
21247
21248int
f676971a 21249rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 21250 enum reg_class from, enum reg_class to)
34bb030a
DE
21251{
21252 /* Moves from/to GENERAL_REGS. */
21253 if (reg_classes_intersect_p (to, GENERAL_REGS)
21254 || reg_classes_intersect_p (from, GENERAL_REGS))
21255 {
21256 if (! reg_classes_intersect_p (to, GENERAL_REGS))
21257 from = to;
21258
21259 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
21260 return (rs6000_memory_move_cost (mode, from, 0)
21261 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
21262
c4ad648e
AM
21263 /* It's more expensive to move CR_REGS than CR0_REGS because of the
21264 shift. */
34bb030a
DE
21265 else if (from == CR_REGS)
21266 return 4;
21267
21268 else
c4ad648e 21269 /* A move will cost one instruction per GPR moved. */
c8b622ff 21270 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
21271 }
21272
c4ad648e 21273 /* Moving between two similar registers is just one instruction. */
34bb030a 21274 else if (reg_classes_intersect_p (to, from))
7393f7f8 21275 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 21276
c4ad648e 21277 /* Everything else has to go through GENERAL_REGS. */
34bb030a 21278 else
f676971a 21279 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
21280 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
21281}
21282
21283/* A C expression returning the cost of moving data of MODE from a register to
21284 or from memory. */
21285
21286int
f676971a 21287rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
a2369ed3 21288 int in ATTRIBUTE_UNUSED)
34bb030a
DE
21289{
21290 if (reg_classes_intersect_p (class, GENERAL_REGS))
c8b622ff 21291 return 4 * hard_regno_nregs[0][mode];
34bb030a 21292 else if (reg_classes_intersect_p (class, FLOAT_REGS))
c8b622ff 21293 return 4 * hard_regno_nregs[32][mode];
34bb030a 21294 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
c8b622ff 21295 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a
DE
21296 else
21297 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
21298}
21299
9c78b944
DE
21300/* Return the decl of a target-specific builtin that implements the
21301 reciprocal of the given built-in function, or NULL_TREE if none is available. */
21302
21303static tree
21304rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
21305 bool sqrt ATTRIBUTE_UNUSED)
21306{
21307 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
21308 && flag_finite_math_only && !flag_trapping_math
21309 && flag_unsafe_math_optimizations))
21310 return NULL_TREE;
21311
21312 if (md_fn)
21313 return NULL_TREE;
21314 else
21315 switch (fn)
21316 {
21317 case BUILT_IN_SQRTF:
21318 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
21319
21320 default:
21321 return NULL_TREE;
21322 }
21323}
21324
ef765ea9
DE
21325/* Newton-Raphson approximation of single-precision floating point divide n/d.
21326 Assumes no trapping math and finite arguments. */
21327
21328void
9c78b944 21329rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21330{
21331 rtx x0, e0, e1, y1, u0, v0, one;
21332
21333 x0 = gen_reg_rtx (SFmode);
21334 e0 = gen_reg_rtx (SFmode);
21335 e1 = gen_reg_rtx (SFmode);
21336 y1 = gen_reg_rtx (SFmode);
21337 u0 = gen_reg_rtx (SFmode);
21338 v0 = gen_reg_rtx (SFmode);
21339 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21340
21341 /* x0 = 1./d estimate */
21342 emit_insn (gen_rtx_SET (VOIDmode, x0,
21343 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
21344 UNSPEC_FRES)));
21345 /* e0 = 1. - d * x0 */
21346 emit_insn (gen_rtx_SET (VOIDmode, e0,
21347 gen_rtx_MINUS (SFmode, one,
21348 gen_rtx_MULT (SFmode, d, x0))));
21349 /* e1 = e0 + e0 * e0 */
21350 emit_insn (gen_rtx_SET (VOIDmode, e1,
21351 gen_rtx_PLUS (SFmode,
21352 gen_rtx_MULT (SFmode, e0, e0), e0)));
21353 /* y1 = x0 + e1 * x0 */
21354 emit_insn (gen_rtx_SET (VOIDmode, y1,
21355 gen_rtx_PLUS (SFmode,
21356 gen_rtx_MULT (SFmode, e1, x0), x0)));
21357 /* u0 = n * y1 */
21358 emit_insn (gen_rtx_SET (VOIDmode, u0,
21359 gen_rtx_MULT (SFmode, n, y1)));
21360 /* v0 = n - d * u0 */
21361 emit_insn (gen_rtx_SET (VOIDmode, v0,
21362 gen_rtx_MINUS (SFmode, n,
21363 gen_rtx_MULT (SFmode, d, u0))));
9c78b944
DE
21364 /* dst = u0 + v0 * y1 */
21365 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21366 gen_rtx_PLUS (SFmode,
21367 gen_rtx_MULT (SFmode, v0, y1), u0)));
21368}
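
/* Illustrative sketch (not part of rs6000.c): the same refinement the RTL
   above emits, written in plain C so the algebra is visible.  The starting
   estimate x0 stands in for the hardware "fres" reciprocal estimate; two
   error-refinement steps plus the final residual correction recover full
   single precision for finite, non-trapping operands.  */
#include <stdio.h>

static float
swdivsf_sketch (float n, float d)
{
  float x0 = 1.0f / d;          /* stand-in for the fres estimate      */
  float e0 = 1.0f - d * x0;     /* e0 = 1 - d*x0                       */
  float e1 = e0 + e0 * e0;      /* e1 = e0 + e0^2                      */
  float y1 = x0 + e1 * x0;      /* y1 ~= 1/d                           */
  float u0 = n * y1;            /* first quotient estimate             */
  float v0 = n - d * u0;        /* residual                            */
  return u0 + v0 * y1;          /* corrected quotient                  */
}

int
main (void)
{
  printf ("%f\n", swdivsf_sketch (1.0f, 3.0f));   /* prints ~0.333333 */
  return 0;
}
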
21369
21370/* Newton-Raphson approximation of double-precision floating point divide n/d.
21371 Assumes no trapping math and finite arguments. */
21372
21373void
9c78b944 21374rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21375{
21376 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
21377
21378 x0 = gen_reg_rtx (DFmode);
21379 e0 = gen_reg_rtx (DFmode);
21380 e1 = gen_reg_rtx (DFmode);
21381 e2 = gen_reg_rtx (DFmode);
21382 y1 = gen_reg_rtx (DFmode);
21383 y2 = gen_reg_rtx (DFmode);
21384 y3 = gen_reg_rtx (DFmode);
21385 u0 = gen_reg_rtx (DFmode);
21386 v0 = gen_reg_rtx (DFmode);
21387 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
21388
21389 /* x0 = 1./d estimate */
21390 emit_insn (gen_rtx_SET (VOIDmode, x0,
21391 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
21392 UNSPEC_FRES)));
21393 /* e0 = 1. - d * x0 */
21394 emit_insn (gen_rtx_SET (VOIDmode, e0,
21395 gen_rtx_MINUS (DFmode, one,
21396 gen_rtx_MULT (DFmode, d, x0))));
21397 /* y1 = x0 + e0 * x0 */
21398 emit_insn (gen_rtx_SET (VOIDmode, y1,
21399 gen_rtx_PLUS (DFmode,
21400 gen_rtx_MULT (DFmode, e0, x0), x0)));
21401 /* e1 = e0 * e0 */
21402 emit_insn (gen_rtx_SET (VOIDmode, e1,
21403 gen_rtx_MULT (DFmode, e0, e0)));
21404 /* y2 = y1 + e1 * y1 */
21405 emit_insn (gen_rtx_SET (VOIDmode, y2,
21406 gen_rtx_PLUS (DFmode,
21407 gen_rtx_MULT (DFmode, e1, y1), y1)));
21408 /* e2 = e1 * e1 */
21409 emit_insn (gen_rtx_SET (VOIDmode, e2,
21410 gen_rtx_MULT (DFmode, e1, e1)));
21411 /* y3 = y2 + e2 * y2 */
21412 emit_insn (gen_rtx_SET (VOIDmode, y3,
21413 gen_rtx_PLUS (DFmode,
21414 gen_rtx_MULT (DFmode, e2, y2), y2)));
21415 /* u0 = n * y3 */
21416 emit_insn (gen_rtx_SET (VOIDmode, u0,
21417 gen_rtx_MULT (DFmode, n, y3)));
21418 /* v0 = n - d * u0 */
21419 emit_insn (gen_rtx_SET (VOIDmode, v0,
21420 gen_rtx_MINUS (DFmode, n,
21421 gen_rtx_MULT (DFmode, d, u0))));
9c78b944
DE
21422 /* dst = u0 + v0 * y3 */
21423 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21424 gen_rtx_PLUS (DFmode,
21425 gen_rtx_MULT (DFmode, v0, y3), u0)));
21426}
21427
565ef4ba 21428
9c78b944
DE
21429/* Newton-Raphson approximation of single-precision floating point rsqrt.
21430 Assumes no trapping math and finite arguments. */
21431
21432void
21433rs6000_emit_swrsqrtsf (rtx dst, rtx src)
21434{
21435 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
21436 half, one, halfthree, c1, cond, label;
21437
21438 x0 = gen_reg_rtx (SFmode);
21439 x1 = gen_reg_rtx (SFmode);
21440 x2 = gen_reg_rtx (SFmode);
21441 y1 = gen_reg_rtx (SFmode);
21442 u0 = gen_reg_rtx (SFmode);
21443 u1 = gen_reg_rtx (SFmode);
21444 u2 = gen_reg_rtx (SFmode);
21445 v0 = gen_reg_rtx (SFmode);
21446 v1 = gen_reg_rtx (SFmode);
21447 v2 = gen_reg_rtx (SFmode);
21448 t0 = gen_reg_rtx (SFmode);
21449 halfthree = gen_reg_rtx (SFmode);
21450 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
21451 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
21452
21453 /* Check 0.0, 1.0, NaN and Inf by testing whether src * src == src. */
21454 emit_insn (gen_rtx_SET (VOIDmode, t0,
21455 gen_rtx_MULT (SFmode, src, src)));
21456
21457 emit_insn (gen_rtx_SET (VOIDmode, cond,
21458 gen_rtx_COMPARE (CCFPmode, t0, src)));
21459 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
21460 emit_unlikely_jump (c1, label);
21461
21462 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
21463 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21464
21465 /* halfthree = 1.5 = 1.0 + 0.5 */
21466 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
21467 gen_rtx_PLUS (SFmode, one, half)));
21468
21469 /* x0 = rsqrt estimate */
21470 emit_insn (gen_rtx_SET (VOIDmode, x0,
21471 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
21472 UNSPEC_RSQRT)));
21473
21474 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
21475 emit_insn (gen_rtx_SET (VOIDmode, y1,
21476 gen_rtx_MINUS (SFmode,
21477 gen_rtx_MULT (SFmode, src, halfthree),
21478 src)));
21479
21480 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
21481 emit_insn (gen_rtx_SET (VOIDmode, u0,
21482 gen_rtx_MULT (SFmode, x0, x0)));
21483 emit_insn (gen_rtx_SET (VOIDmode, v0,
21484 gen_rtx_MINUS (SFmode,
21485 halfthree,
21486 gen_rtx_MULT (SFmode, y1, u0))));
21487 emit_insn (gen_rtx_SET (VOIDmode, x1,
21488 gen_rtx_MULT (SFmode, x0, v0)));
21489
21490 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
21491 emit_insn (gen_rtx_SET (VOIDmode, u1,
21492 gen_rtx_MULT (SFmode, x1, x1)));
21493 emit_insn (gen_rtx_SET (VOIDmode, v1,
21494 gen_rtx_MINUS (SFmode,
21495 halfthree,
21496 gen_rtx_MULT (SFmode, y1, u1))));
21497 emit_insn (gen_rtx_SET (VOIDmode, x2,
21498 gen_rtx_MULT (SFmode, x1, v1)));
21499
21500 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
21501 emit_insn (gen_rtx_SET (VOIDmode, u2,
21502 gen_rtx_MULT (SFmode, x2, x2)));
21503 emit_insn (gen_rtx_SET (VOIDmode, v2,
21504 gen_rtx_MINUS (SFmode,
21505 halfthree,
21506 gen_rtx_MULT (SFmode, y1, u2))));
21507 emit_insn (gen_rtx_SET (VOIDmode, dst,
21508 gen_rtx_MULT (SFmode, x2, v2)));
21509
21510 emit_label (XEXP (label, 0));
21511}
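
/* Illustrative sketch (not part of rs6000.c): the Newton-Raphson rsqrt step
   the RTL above encodes, x' = x * (1.5 - (0.5*src) * x*x), applied three
   times.  The initial estimate stands in for the hardware "frsqrte" result,
   and 0.5*src is formed as 1.5*src - src so that only the 1.5 constant is
   needed, matching the y1 computation above.  */
#include <math.h>
#include <stdio.h>

static float
swrsqrtsf_sketch (float src)
{
  float x = 1.0f / sqrtf (src);        /* stand-in for the frsqrte estimate */
  float halfthree = 1.5f;
  float y1 = src * halfthree - src;    /* == 0.5f * src                     */
  int i;

  for (i = 0; i < 3; i++)              /* three refinement steps, as above  */
    x = x * (halfthree - y1 * (x * x));
  return x;
}

int
main (void)
{
  printf ("%f\n", swrsqrtsf_sketch (4.0f));   /* prints ~0.5 */
  return 0;
}
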
21512
565ef4ba
RS
21513/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
21514 target, and SRC is the argument operand. */
21515
21516void
21517rs6000_emit_popcount (rtx dst, rtx src)
21518{
21519 enum machine_mode mode = GET_MODE (dst);
21520 rtx tmp1, tmp2;
21521
21522 tmp1 = gen_reg_rtx (mode);
21523
21524 if (mode == SImode)
21525 {
21526 emit_insn (gen_popcntbsi2 (tmp1, src));
21527 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
21528 NULL_RTX, 0);
21529 tmp2 = force_reg (SImode, tmp2);
21530 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
21531 }
21532 else
21533 {
21534 emit_insn (gen_popcntbdi2 (tmp1, src));
21535 tmp2 = expand_mult (DImode, tmp1,
21536 GEN_INT ((HOST_WIDE_INT)
21537 0x01010101 << 32 | 0x01010101),
21538 NULL_RTX, 0);
21539 tmp2 = force_reg (DImode, tmp2);
21540 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
21541 }
21542}
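
/* Illustrative sketch (not part of rs6000.c): why multiplying the per-byte
   counts by 0x01010101 and shifting right by 24 yields the word popcount.
   popcntb_sketch is an assumed stand-in for the popcntb instruction, which
   leaves each byte's population count in that byte; the multiply then sums
   the four byte counts into the top byte (no carries, since the total is at
   most 32).  */
#include <stdint.h>
#include <stdio.h>

static uint32_t
popcntb_sketch (uint32_t x)
{
  uint32_t result = 0;
  int byte;

  for (byte = 0; byte < 4; byte++)
    {
      uint32_t b = (x >> (8 * byte)) & 0xff;
      uint32_t count = 0;
      while (b)
        {
          count += b & 1;
          b >>= 1;
        }
      result |= count << (8 * byte);
    }
  return result;
}

int
main (void)
{
  uint32_t x = 0xf00f0ff0;                            /* 16 bits set      */
  uint32_t bytes = popcntb_sketch (x);                /* per-byte counts  */
  uint32_t total = (bytes * 0x01010101u) >> 24;       /* sum of the bytes */
  printf ("%u\n", total);                             /* prints 16        */
  return 0;
}
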
21543
21544
21545/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
21546 target, and SRC is the argument operand. */
21547
21548void
21549rs6000_emit_parity (rtx dst, rtx src)
21550{
21551 enum machine_mode mode = GET_MODE (dst);
21552 rtx tmp;
21553
21554 tmp = gen_reg_rtx (mode);
21555 if (mode == SImode)
21556 {
21557 /* Is mult+shift >= shift+xor+shift+xor? */
21558 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
21559 {
21560 rtx tmp1, tmp2, tmp3, tmp4;
21561
21562 tmp1 = gen_reg_rtx (SImode);
21563 emit_insn (gen_popcntbsi2 (tmp1, src));
21564
21565 tmp2 = gen_reg_rtx (SImode);
21566 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
21567 tmp3 = gen_reg_rtx (SImode);
21568 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
21569
21570 tmp4 = gen_reg_rtx (SImode);
21571 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
21572 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
21573 }
21574 else
21575 rs6000_emit_popcount (tmp, src);
21576 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
21577 }
21578 else
21579 {
21580 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
21581 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
21582 {
21583 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
21584
21585 tmp1 = gen_reg_rtx (DImode);
21586 emit_insn (gen_popcntbdi2 (tmp1, src));
21587
21588 tmp2 = gen_reg_rtx (DImode);
21589 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
21590 tmp3 = gen_reg_rtx (DImode);
21591 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
21592
21593 tmp4 = gen_reg_rtx (DImode);
21594 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
21595 tmp5 = gen_reg_rtx (DImode);
21596 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
21597
21598 tmp6 = gen_reg_rtx (DImode);
21599 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
21600 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
21601 }
21602 else
21603 rs6000_emit_popcount (tmp, src);
21604 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
21605 }
21606}
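/* Editor's note, not part of the original file: when the multiply is
   too expensive, the SImode path above folds the per-byte counts with
   shift/xor so that the low bit of the result holds the parity of the
   whole word; the final AND with 1 extracts it.  A sketch in plain C,
   with `per_byte_counts' again a hypothetical stand-in for the
   popcntbsi2 result:  */

static unsigned int
parity_sketch (unsigned int per_byte_counts)
{
  unsigned int t = per_byte_counts;
  t ^= t >> 16;         /* fold the upper halfword onto the lower */
  t ^= t >> 8;          /* fold the remaining two bytes */
  return t & 1;         /* low bit is the parity of the whole word */
}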
21607
ded9bf77
AH
21608/* Return an RTX representing where to find the function value of a
21609 function returning MODE. */
21610static rtx
21611rs6000_complex_function_value (enum machine_mode mode)
21612{
21613 unsigned int regno;
21614 rtx r1, r2;
21615 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 21616 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 21617
18f63bfa
AH
21618 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21619 regno = FP_ARG_RETURN;
354ed18f
AH
21620 else
21621 {
18f63bfa 21622 regno = GP_ARG_RETURN;
ded9bf77 21623
18f63bfa
AH
21624 /* 32-bit is OK since it'll go in r3/r4. */
21625 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
21626 return gen_rtx_REG (mode, regno);
21627 }
21628
18f63bfa
AH
21629 if (inner_bytes >= 8)
21630 return gen_rtx_REG (mode, regno);
21631
ded9bf77
AH
21632 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
21633 const0_rtx);
21634 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 21635 GEN_INT (inner_bytes));
ded9bf77
AH
21636 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
21637}
21638
a6ebc39a
AH
21639/* Define how to find the value returned by a function.
21640 VALTYPE is the data type of the value (as a tree).
21641 If the precise function being called is known, FUNC is its FUNCTION_DECL;
21642 otherwise, FUNC is 0.
21643
21644 On the SPE, both FPs and vectors are returned in r3.
21645
21646 On RS/6000 an integer value is in r3 and a floating-point value is in
21647 fp1, unless -msoft-float. */
21648
21649rtx
586de218 21650rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
21651{
21652 enum machine_mode mode;
2a8fa26c 21653 unsigned int regno;
a6ebc39a 21654
594a51fe
SS
21655 /* Special handling for structs in darwin64. */
21656 if (rs6000_darwin64_abi
21657 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
21658 && TREE_CODE (valtype) == RECORD_TYPE
21659 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
21660 {
21661 CUMULATIVE_ARGS valcum;
21662 rtx valret;
21663
0b5383eb 21664 valcum.words = 0;
594a51fe
SS
21665 valcum.fregno = FP_ARG_MIN_REG;
21666 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
21667 /* Do a trial code generation as if this were going to be passed as
21668 an argument; if any part goes in memory, we return NULL. */
21669 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
21670 if (valret)
21671 return valret;
21672 /* Otherwise fall through to standard ABI rules. */
21673 }
21674
0e67400a
FJ
21675 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
21676 {
21677 /* A long long return value needs to be split in the 32-bit ABI with -mpowerpc64. */
21678 return gen_rtx_PARALLEL (DImode,
21679 gen_rtvec (2,
21680 gen_rtx_EXPR_LIST (VOIDmode,
21681 gen_rtx_REG (SImode, GP_ARG_RETURN),
21682 const0_rtx),
21683 gen_rtx_EXPR_LIST (VOIDmode,
21684 gen_rtx_REG (SImode,
21685 GP_ARG_RETURN + 1),
21686 GEN_INT (4))));
21687 }
0f086e42
FJ
21688 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
21689 {
21690 return gen_rtx_PARALLEL (DCmode,
21691 gen_rtvec (4,
21692 gen_rtx_EXPR_LIST (VOIDmode,
21693 gen_rtx_REG (SImode, GP_ARG_RETURN),
21694 const0_rtx),
21695 gen_rtx_EXPR_LIST (VOIDmode,
21696 gen_rtx_REG (SImode,
21697 GP_ARG_RETURN + 1),
21698 GEN_INT (4)),
21699 gen_rtx_EXPR_LIST (VOIDmode,
21700 gen_rtx_REG (SImode,
21701 GP_ARG_RETURN + 2),
21702 GEN_INT (8)),
21703 gen_rtx_EXPR_LIST (VOIDmode,
21704 gen_rtx_REG (SImode,
21705 GP_ARG_RETURN + 3),
21706 GEN_INT (12))));
21707 }
602ea4d3 21708
7348aa7f
FXC
21709 mode = TYPE_MODE (valtype);
21710 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 21711 || POINTER_TYPE_P (valtype))
b78d48dd 21712 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 21713
e41b2a33
PB
21714 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21715 /* _Decimal128 must use an even/odd register pair. */
21716 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
00b79d54 21717 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 21718 regno = FP_ARG_RETURN;
ded9bf77 21719 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 21720 && targetm.calls.split_complex_arg)
ded9bf77 21721 return rs6000_complex_function_value (mode);
44688022 21722 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 21723 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 21724 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 21725 regno = ALTIVEC_ARG_RETURN;
18f63bfa 21726 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4d4447b5
PB
21727 && (mode == DFmode || mode == DDmode || mode == DCmode
21728 || mode == TFmode || mode == TDmode || mode == TCmode))
18f63bfa 21729 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
21730 else
21731 regno = GP_ARG_RETURN;
21732
21733 return gen_rtx_REG (mode, regno);
21734}
21735
ded9bf77
AH
21736/* Define how to find the value returned by a library function
21737 assuming the value has mode MODE. */
21738rtx
21739rs6000_libcall_value (enum machine_mode mode)
21740{
21741 unsigned int regno;
21742
2e6c9641
FJ
21743 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
21744 {
21745 /* A long long return value needs to be split in the 32-bit ABI with -mpowerpc64. */
21746 return gen_rtx_PARALLEL (DImode,
21747 gen_rtvec (2,
21748 gen_rtx_EXPR_LIST (VOIDmode,
21749 gen_rtx_REG (SImode, GP_ARG_RETURN),
21750 const0_rtx),
21751 gen_rtx_EXPR_LIST (VOIDmode,
21752 gen_rtx_REG (SImode,
21753 GP_ARG_RETURN + 1),
21754 GEN_INT (4))));
21755 }
21756
e41b2a33
PB
21757 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21758 /* _Decimal128 must use an even/odd register pair. */
21759 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
00b79d54 21760 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
21761 && TARGET_HARD_FLOAT && TARGET_FPRS)
21762 regno = FP_ARG_RETURN;
44688022
AM
21763 else if (ALTIVEC_VECTOR_MODE (mode)
21764 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 21765 regno = ALTIVEC_ARG_RETURN;
42ba5130 21766 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 21767 return rs6000_complex_function_value (mode);
18f63bfa 21768 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4d4447b5
PB
21769 && (mode == DFmode || mode == DDmode || mode == DCmode
21770 || mode == TFmode || mode == TDmode || mode == TCmode))
18f63bfa 21771 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
21772 else
21773 regno = GP_ARG_RETURN;
21774
21775 return gen_rtx_REG (mode, regno);
21776}
21777
d1d0c603
JJ
21778/* Define the offset between two registers, FROM to be eliminated and its
21779 replacement TO, at the start of a routine. */
21780HOST_WIDE_INT
21781rs6000_initial_elimination_offset (int from, int to)
21782{
21783 rs6000_stack_t *info = rs6000_stack_info ();
21784 HOST_WIDE_INT offset;
21785
7d5175e1 21786 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 21787 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
21788 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
21789 {
21790 offset = info->push_p ? 0 : -info->total_size;
21791 if (FRAME_GROWS_DOWNWARD)
5b667039 21792 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
21793 }
21794 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
21795 offset = FRAME_GROWS_DOWNWARD
5b667039 21796 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
21797 : 0;
21798 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
21799 offset = info->total_size;
21800 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
21801 offset = info->push_p ? info->total_size : 0;
21802 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
21803 offset = 0;
21804 else
37409796 21805 gcc_unreachable ();
d1d0c603
JJ
21806
21807 return offset;
21808}
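/* Editor's note, not part of the original file: the offsets returned
   above compose consistently, e.g. ARG_POINTER -> STACK_POINTER equals
   ARG_POINTER -> HARD_FRAME_POINTER plus HARD_FRAME_POINTER ->
   STACK_POINTER for either value of push_p.  A small sketch checking
   that identity with a hypothetical frame size:  */

static int
check_elimination_offsets_sketch (long total_size, int push_p)
{
  long hard_frame_to_stack = push_p ? 0 : -total_size;
  long arg_to_hard_frame = total_size;
  long arg_to_stack = push_p ? total_size : 0;
  /* Holds for both push_p == 0 and push_p == 1.  */
  return arg_to_stack == arg_to_hard_frame + hard_frame_to_stack;
}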
21809
58646b77 21810/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 21811
c8e4f0e9 21812static bool
3101faab 21813rs6000_is_opaque_type (const_tree type)
62e1dfcf 21814{
58646b77 21815 return (type == opaque_V2SI_type_node
2abe3e28 21816 || type == opaque_V2SF_type_node
58646b77
PB
21817 || type == opaque_p_V2SI_type_node
21818 || type == opaque_V4SI_type_node);
62e1dfcf
NC
21819}
21820
96714395 21821static rtx
a2369ed3 21822rs6000_dwarf_register_span (rtx reg)
96714395
AH
21823{
21824 unsigned regno;
21825
4d4cbc0e
AH
21826 if (TARGET_SPE
21827 && (SPE_VECTOR_MODE (GET_MODE (reg))
4d4447b5
PB
21828 || (TARGET_E500_DOUBLE
21829 && (GET_MODE (reg) == DFmode || GET_MODE (reg) == DDmode))))
4d4cbc0e
AH
21830 ;
21831 else
96714395
AH
21832 return NULL_RTX;
21833
21834 regno = REGNO (reg);
21835
21836 /* The duality of the SPE register size wreaks all kinds of havoc.
21837 This is a way of distinguishing r0 in 32-bits from r0 in
21838 64-bits. */
21839 return
21840 gen_rtx_PARALLEL (VOIDmode,
3bd104d1
AH
21841 BYTES_BIG_ENDIAN
21842 ? gen_rtvec (2,
21843 gen_rtx_REG (SImode, regno + 1200),
21844 gen_rtx_REG (SImode, regno))
21845 : gen_rtvec (2,
21846 gen_rtx_REG (SImode, regno),
21847 gen_rtx_REG (SImode, regno + 1200)));
96714395
AH
21848}
21849
37ea0b7e
JM
21850/* Fill in sizes for SPE register high parts in table used by unwinder. */
21851
21852static void
21853rs6000_init_dwarf_reg_sizes_extra (tree address)
21854{
21855 if (TARGET_SPE)
21856 {
21857 int i;
21858 enum machine_mode mode = TYPE_MODE (char_type_node);
21859 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
21860 rtx mem = gen_rtx_MEM (BLKmode, addr);
21861 rtx value = gen_int_mode (4, mode);
21862
21863 for (i = 1201; i < 1232; i++)
21864 {
21865 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
21866 HOST_WIDE_INT offset
21867 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
21868
21869 emit_move_insn (adjust_address (mem, mode, offset), value);
21870 }
21871 }
21872}
21873
93c9d1ba
AM
21874/* Map internal gcc register numbers to DWARF2 register numbers. */
21875
21876unsigned int
21877rs6000_dbx_register_number (unsigned int regno)
21878{
21879 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
21880 return regno;
21881 if (regno == MQ_REGNO)
21882 return 100;
1de43f85 21883 if (regno == LR_REGNO)
93c9d1ba 21884 return 108;
1de43f85 21885 if (regno == CTR_REGNO)
93c9d1ba
AM
21886 return 109;
21887 if (CR_REGNO_P (regno))
21888 return regno - CR0_REGNO + 86;
21889 if (regno == XER_REGNO)
21890 return 101;
21891 if (ALTIVEC_REGNO_P (regno))
21892 return regno - FIRST_ALTIVEC_REGNO + 1124;
21893 if (regno == VRSAVE_REGNO)
21894 return 356;
21895 if (regno == VSCR_REGNO)
21896 return 67;
21897 if (regno == SPE_ACC_REGNO)
21898 return 99;
21899 if (regno == SPEFSCR_REGNO)
21900 return 612;
21901 /* SPE high reg number. We get these values of regno from
21902 rs6000_dwarf_register_span. */
37409796
NS
21903 gcc_assert (regno >= 1200 && regno < 1232);
21904 return regno;
93c9d1ba
AM
21905}
21906
93f90be6 21907/* target hook eh_return_filter_mode */
f676971a 21908static enum machine_mode
93f90be6
FJ
21909rs6000_eh_return_filter_mode (void)
21910{
21911 return TARGET_32BIT ? SImode : word_mode;
21912}
21913
00b79d54
BE
21914/* Target hook for scalar_mode_supported_p. */
21915static bool
21916rs6000_scalar_mode_supported_p (enum machine_mode mode)
21917{
21918 if (DECIMAL_FLOAT_MODE_P (mode))
21919 return true;
21920 else
21921 return default_scalar_mode_supported_p (mode);
21922}
21923
f676971a
EC
21924/* Target hook for vector_mode_supported_p. */
21925static bool
21926rs6000_vector_mode_supported_p (enum machine_mode mode)
21927{
21928
96038623
DE
21929 if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
21930 return true;
21931
f676971a
EC
21932 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
21933 return true;
21934
21935 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
21936 return true;
21937
21938 else
21939 return false;
21940}
21941
bb8df8a6
EC
21942/* Target hook for invalid_arg_for_unprototyped_fn. */
21943static const char *
3101faab 21944invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
4d3e6fae
FJ
21945{
21946 return (!rs6000_darwin64_abi
21947 && typelist == 0
21948 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
21949 && (funcdecl == NULL_TREE
21950 || (TREE_CODE (funcdecl) == FUNCTION_DECL
21951 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
21952 ? N_("AltiVec argument passed to unprototyped function")
21953 : NULL;
21954}
21955
3aebbe5f
JJ
21956/* For TARGET_SECURE_PLT 32-bit PIC code we can avoid the PIC register
21957 setup by calling the hidden function __stack_chk_fail_local instead
21958 of calling __stack_chk_fail directly.  Otherwise it is better to call
21959 __stack_chk_fail directly. */
21960
21961static tree
21962rs6000_stack_protect_fail (void)
21963{
21964 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
21965 ? default_hidden_stack_protect_fail ()
21966 : default_external_stack_protect_fail ();
21967}
21968
17211ab5 21969#include "gt-rs6000.h"