gcc/config/rs6000/rs6000.c (thirdparty/gcc.git)
9878760c 1/* Subroutines used for code generation on IBM RS/6000.
f676971a 2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
8ef65e3d 3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
337bde91 4 Free Software Foundation, Inc.
fab3bcc3 5 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
9878760c 6
5de601cf 7 This file is part of GCC.
9878760c 8
5de601cf
NC
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
2f83c7d6 11 by the Free Software Foundation; either version 3, or (at your
5de601cf 12 option) any later version.
9878760c 13
5de601cf
NC
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
9878760c 18
5de601cf 19 You should have received a copy of the GNU General Public License
2f83c7d6
NC
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
9878760c 22
956d6950 23#include "config.h"
c4d38ccb 24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
9878760c
RK
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
9878760c
RK
33#include "insn-attr.h"
34#include "flags.h"
35#include "recog.h"
9878760c 36#include "obstack.h"
9b30bae2 37#include "tree.h"
dfafc897 38#include "expr.h"
2fc1c679 39#include "optabs.h"
2a430ec1 40#include "except.h"
a7df97e6 41#include "function.h"
296b8152 42#include "output.h"
d5fa86ba 43#include "basic-block.h"
d0101753 44#include "integrate.h"
296b8152 45#include "toplev.h"
c8023011 46#include "ggc.h"
9ebbca7d
GK
47#include "hashtab.h"
48#include "tm_p.h"
672a6f42
NB
49#include "target.h"
50#include "target-def.h"
3ac88239 51#include "langhooks.h"
24ea750e 52#include "reload.h"
117dca74 53#include "cfglayout.h"
79ae11c4 54#include "sched-int.h"
cd3ce9b4 55#include "tree-gimple.h"
e41b2a33 56#include "tree-flow.h"
4d3e6fae 57#include "intl.h"
59d6560b 58#include "params.h"
279bb624 59#include "tm-constrs.h"
1bc7c5b6
ZW
60#if TARGET_XCOFF
61#include "xcoffout.h" /* get declarations of xcoff_*_section_name */
62#endif
93a27b7b
ZW
63#if TARGET_MACHO
64#include "gstab.h" /* for N_SLINE */
65#endif
9b30bae2 66
7509c759
MM
67#ifndef TARGET_NO_PROTOTYPE
68#define TARGET_NO_PROTOTYPE 0
69#endif
70
9878760c
RK
71#define min(A,B) ((A) < (B) ? (A) : (B))
72#define max(A,B) ((A) > (B) ? (A) : (B))
73
d1d0c603
JJ
74/* Structure used to define the rs6000 stack */
75typedef struct rs6000_stack {
76 int first_gp_reg_save; /* first callee saved GP register used */
77 int first_fp_reg_save; /* first callee saved FP register used */
78 int first_altivec_reg_save; /* first callee saved AltiVec register used */
79 int lr_save_p; /* true if the link reg needs to be saved */
80 int cr_save_p; /* true if the CR reg needs to be saved */
81 unsigned int vrsave_mask; /* mask of vec registers to save */
d1d0c603
JJ
82 int push_p; /* true if we need to allocate stack space */
83 int calls_p; /* true if the function makes any calls */
c4ad648e 84 int world_save_p; /* true if we're saving *everything*:
d62294f5 85 r13-r31, cr, f14-f31, vrsave, v20-v31 */
d1d0c603
JJ
86 enum rs6000_abi abi; /* which ABI to use */
87 int gp_save_offset; /* offset to save GP regs from initial SP */
88 int fp_save_offset; /* offset to save FP regs from initial SP */
89 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
90 int lr_save_offset; /* offset to save LR from initial SP */
91 int cr_save_offset; /* offset to save CR from initial SP */
92 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
93 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
d1d0c603
JJ
94 int varargs_save_offset; /* offset to save the varargs registers */
95 int ehrd_offset; /* offset to EH return data */
96 int reg_size; /* register size (4 or 8) */
d1d0c603
JJ
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
d1d0c603
JJ
105 int vrsave_size; /* size to hold VRSAVE if not in save_size */
106 int altivec_padding_size; /* size of altivec alignment padding if
107 not in save_size */
108 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
109 int spe_padding_size;
d1d0c603
JJ
110 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
111 int spe_64bit_regs_used;
112} rs6000_stack_t;
113
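/* Illustrative sketch, not part of the original source: prologue and
   epilogue generation consult this layout through rs6000_stack_info ()
   (declared further down) instead of recomputing it, along the lines of

     rs6000_stack_t *info = rs6000_stack_info ();
     if (info->lr_save_p)
       ... save LR at frame offset info->lr_save_offset ...

   Offsets are measured from the initial stack pointer and sizes are in
   bytes, as the field comments above state.  */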
5b667039
JJ
114/* A C structure for machine-specific, per-function data.
115 This is added to the cfun structure. */
116typedef struct machine_function GTY(())
117{
118 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
119 int ra_needs_full_frame;
120 /* Some local-dynamic symbol. */
121 const char *some_ld_name;
122 /* Whether the instruction chain has been scanned already. */
123 int insn_chain_scanned_p;
124 /* Flags if __builtin_return_address (0) was used. */
125 int ra_need_lr;
126 /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
127 varargs save area. */
128 HOST_WIDE_INT varargs_save_offset;
e41b2a33
PB
129 /* Temporary stack slot to use for SDmode copies. This slot is
130 64-bits wide and is allocated early enough so that the offset
131 does not overflow the 16-bit load/store offset field. */
132 rtx sdmode_stack_slot;
5b667039
JJ
133} machine_function;
134
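/* Illustrative sketch (assumption, not in the original source): this
   structure is allocated per function by rs6000_init_machine_status
   (declared below) and reached through the current function, e.g.

     cfun->machine->ra_needs_full_frame = 1;
     rtx slot = cfun->machine->sdmode_stack_slot;
 */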
5248c961
RK
135/* Target cpu type */
136
137enum processor_type rs6000_cpu;
8e3f41e7
MM
138struct rs6000_cpu_select rs6000_select[3] =
139{
815cdc52
MM
140 /* switch name, tune arch */
141 { (const char *)0, "--with-cpu=", 1, 1 },
142 { (const char *)0, "-mcpu=", 1, 1 },
143 { (const char *)0, "-mtune=", 1, 0 },
8e3f41e7 144};
5248c961 145
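/* Illustrative note, not in the original source: rs6000_override_options
   (below) walks rs6000_select[]; per the "switch name, tune arch" comment
   above, the two trailing flags in each entry say whether that switch
   selects the tuning model, the architecture, or both, so -mtune= affects
   tuning only while --with-cpu= and -mcpu= set both.  */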
d296e02e
AP
146static GTY(()) bool rs6000_cell_dont_microcode;
147
ec507f2d
DE
148/* Always emit branch hint bits. */
149static GTY(()) bool rs6000_always_hint;
150
151/* Schedule instructions for group formation. */
152static GTY(()) bool rs6000_sched_groups;
153
44cd321e
PS
154/* Align branch targets. */
155static GTY(()) bool rs6000_align_branch_targets;
156
569fa502
DN
157/* Support for -msched-costly-dep option. */
158const char *rs6000_sched_costly_dep_str;
159enum rs6000_dependence_cost rs6000_sched_costly_dep;
160
cbe26ab8
DN
161/* Support for -minsert-sched-nops option. */
162const char *rs6000_sched_insert_nops_str;
163enum rs6000_nop_insertion rs6000_sched_insert_nops;
164
7ccf35ed 165/* Support targetm.vectorize.builtin_mask_for_load. */
13c62176 166static GTY(()) tree altivec_builtin_mask_for_load;
7ccf35ed 167
602ea4d3 168/* Size of long double. */
6fa3f289
ZW
169int rs6000_long_double_type_size;
170
602ea4d3
JJ
171/* IEEE quad extended precision long double. */
172int rs6000_ieeequad;
173
a2db2771 174/* Nonzero to use AltiVec ABI. */
6fa3f289
ZW
175int rs6000_altivec_abi;
176
a3170dc6
AH
177/* Nonzero if we want SPE ABI extensions. */
178int rs6000_spe_abi;
179
5da702b1
AH
180/* Nonzero if floating point operations are done in the GPRs. */
181int rs6000_float_gprs = 0;
182
594a51fe
SS
183/* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
184int rs6000_darwin64_abi;
185
a0ab749a 186/* Set to nonzero once AIX common-mode calls have been defined. */
bbfb86aa 187static GTY(()) int common_mode_defined;
c81bebd7 188
9878760c
RK
189/* Save information from a "cmpxx" operation until the branch or scc is
190 emitted. */
9878760c
RK
191rtx rs6000_compare_op0, rs6000_compare_op1;
192int rs6000_compare_fp_p;
874a0744 193
874a0744
MM
194/* Label number of label created for -mrelocatable, to call to so we can
195 get the address of the GOT section */
196int rs6000_pic_labelno;
c81bebd7 197
b91da81f 198#ifdef USING_ELFOS_H
c81bebd7 199/* Which abi to adhere to */
9739c90c 200const char *rs6000_abi_name;
d9407988
MM
201
202/* Semantics of the small data area */
203enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
204
205/* Which small data model to use */
815cdc52 206const char *rs6000_sdata_name = (char *)0;
9ebbca7d
GK
207
208/* Counter for labels which are to be placed in .fixup. */
209int fixuplabelno = 0;
874a0744 210#endif
4697a36c 211
c4501e62
JJ
212/* Bit size of immediate TLS offsets and string from which it is decoded. */
213int rs6000_tls_size = 32;
214const char *rs6000_tls_size_string;
215
b6c9286a
MM
216/* ABI enumeration available for subtarget to use. */
217enum rs6000_abi rs6000_current_abi;
218
85b776df
AM
219/* Whether to use variant of AIX ABI for PowerPC64 Linux. */
220int dot_symbols;
221
38c1f2d7 222/* Debug flags */
815cdc52 223const char *rs6000_debug_name;
38c1f2d7
MM
224int rs6000_debug_stack; /* debug stack applications */
225int rs6000_debug_arg; /* debug argument handling */
226
aabcd309 227/* Value is TRUE if register/mode pair is acceptable. */
0d1fbc8c
AH
228bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
229
58646b77
PB
230/* Built in types. */
231
232tree rs6000_builtin_types[RS6000_BTI_MAX];
233tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
8bb418a3 234
57ac7be9
AM
235const char *rs6000_traceback_name;
236static enum {
237 traceback_default = 0,
238 traceback_none,
239 traceback_part,
240 traceback_full
241} rs6000_traceback;
242
38c1f2d7
MM
243/* Flag to say the TOC is initialized */
244int toc_initialized;
9ebbca7d 245char toc_label_name[10];
38c1f2d7 246
44cd321e
PS
247/* Cached value of rs6000_variable_issue. This is cached in
248 rs6000_variable_issue hook and returned from rs6000_sched_reorder2. */
249static short cached_can_issue_more;
250
d6b5193b
RS
251static GTY(()) section *read_only_data_section;
252static GTY(()) section *private_data_section;
253static GTY(()) section *read_only_private_data_section;
254static GTY(()) section *sdata2_section;
255static GTY(()) section *toc_section;
256
a3c9585f
KH
257/* Control alignment for fields within structures. */
258/* String from -malign-XXXXX. */
025d9908
KH
259int rs6000_alignment_flags;
260
78f5898b
AH
261/* True for any options that were explicitly set. */
262struct {
df01da37 263 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
78f5898b 264 bool alignment; /* True if -malign- was used. */
a2db2771
JJ
265 bool spe_abi; /* True if -mabi=spe/no-spe was used. */
266 bool altivec_abi; /* True if -mabi=altivec/no-altivec used. */
78f5898b
AH
267 bool spe; /* True if -mspe= was used. */
268 bool float_gprs; /* True if -mfloat-gprs= was used. */
269 bool isel; /* True if -misel was used. */
270 bool long_double; /* True if -mlong-double- was used. */
d3603e8c 271 bool ieee; /* True if -mabi=ieee/ibmlongdouble used. */
a2db2771 272 bool vrsave; /* True if -mvrsave was used. */
78f5898b
AH
273} rs6000_explicit_options;
274
a3170dc6
AH
275struct builtin_description
276{
277 /* mask is not const because we're going to alter it below. This
278 nonsense will go away when we rewrite the -march infrastructure
279 to give us more target flag bits. */
280 unsigned int mask;
281 const enum insn_code icode;
282 const char *const name;
283 const enum rs6000_builtins code;
284};
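/* Illustrative sketch (hypothetical entry, not in the original source):
   descriptions of this type are grouped into tables that pair a target
   mask with an insn pattern and a builtin enumerator, roughly

     static const struct builtin_description bdesc_example[] =
     {
       { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_example",
	 RS6000_BUILTIN_COUNT },
     };

   and def_builtin () (declared below) registers each entry whose mask is
   enabled for the current target.  */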
8b897cfa
RS
285\f
286/* Target cpu costs. */
287
288struct processor_costs {
c4ad648e 289 const int mulsi; /* cost of SImode multiplication. */
8b897cfa
RS
290 const int mulsi_const; /* cost of SImode multiplication by constant. */
291 const int mulsi_const9; /* cost of SImode mult by short constant. */
c4ad648e
AM
292 const int muldi; /* cost of DImode multiplication. */
293 const int divsi; /* cost of SImode division. */
294 const int divdi; /* cost of DImode division. */
295 const int fp; /* cost of simple SFmode and DFmode insns. */
296 const int dmul; /* cost of DFmode multiplication (and fmadd). */
297 const int sdiv; /* cost of SFmode division (fdivs). */
298 const int ddiv; /* cost of DFmode division (fdiv). */
5f732aba
DE
299 const int cache_line_size; /* cache line size in bytes. */
300 const int l1_cache_size; /* size of l1 cache, in kilobytes. */
301 const int l2_cache_size; /* size of l2 cache, in kilobytes. */
0b11da67
DE
302 const int simultaneous_prefetches; /* number of parallel prefetch
303 operations. */
8b897cfa
RS
304};
305
306const struct processor_costs *rs6000_cost;
307
308/* Processor costs (relative to an add) */
309
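/* Illustrative note, not in the original source: the multiply, divide and
   floating-point fields in these tables are wrapped in COSTS_N_INSNS, so
   an entry of COSTS_N_INSNS (n) reads as "about n times the cost of an
   integer add"; the trailing cache-line, L1, L2 and prefetch-stream
   fields are plain integers in bytes, kilobytes, kilobytes and stream
   count respectively.  */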
 310/* Instruction size costs on 32-bit processors. */
311static const
312struct processor_costs size32_cost = {
06a67bdd
RS
313 COSTS_N_INSNS (1), /* mulsi */
314 COSTS_N_INSNS (1), /* mulsi_const */
315 COSTS_N_INSNS (1), /* mulsi_const9 */
316 COSTS_N_INSNS (1), /* muldi */
317 COSTS_N_INSNS (1), /* divsi */
318 COSTS_N_INSNS (1), /* divdi */
319 COSTS_N_INSNS (1), /* fp */
320 COSTS_N_INSNS (1), /* dmul */
321 COSTS_N_INSNS (1), /* sdiv */
322 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
 323 32, /* cache line size */
 324 0, /* l1 cache */
 325 0, /* l2 cache */
5f732aba 326 0, /* streams */
8b897cfa
RS
327};
328
 329/* Instruction size costs on 64-bit processors. */
330static const
331struct processor_costs size64_cost = {
06a67bdd
RS
332 COSTS_N_INSNS (1), /* mulsi */
333 COSTS_N_INSNS (1), /* mulsi_const */
334 COSTS_N_INSNS (1), /* mulsi_const9 */
335 COSTS_N_INSNS (1), /* muldi */
336 COSTS_N_INSNS (1), /* divsi */
337 COSTS_N_INSNS (1), /* divdi */
338 COSTS_N_INSNS (1), /* fp */
339 COSTS_N_INSNS (1), /* dmul */
340 COSTS_N_INSNS (1), /* sdiv */
341 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
 342 128, /* cache line size */
 343 0, /* l1 cache */
 344 0, /* l2 cache */
5f732aba 345 0, /* streams */
8b897cfa
RS
346};
347
348/* Instruction costs on RIOS1 processors. */
349static const
350struct processor_costs rios1_cost = {
06a67bdd
RS
351 COSTS_N_INSNS (5), /* mulsi */
352 COSTS_N_INSNS (4), /* mulsi_const */
353 COSTS_N_INSNS (3), /* mulsi_const9 */
354 COSTS_N_INSNS (5), /* muldi */
355 COSTS_N_INSNS (19), /* divsi */
356 COSTS_N_INSNS (19), /* divdi */
357 COSTS_N_INSNS (2), /* fp */
358 COSTS_N_INSNS (2), /* dmul */
359 COSTS_N_INSNS (19), /* sdiv */
360 COSTS_N_INSNS (19), /* ddiv */
0d158b6e 361 128, /* cache line size */
5f732aba
DE
362 64, /* l1 cache */
363 512, /* l2 cache */
0b11da67 364 0, /* streams */
8b897cfa
RS
365};
366
367/* Instruction costs on RIOS2 processors. */
368static const
369struct processor_costs rios2_cost = {
06a67bdd
RS
370 COSTS_N_INSNS (2), /* mulsi */
371 COSTS_N_INSNS (2), /* mulsi_const */
372 COSTS_N_INSNS (2), /* mulsi_const9 */
373 COSTS_N_INSNS (2), /* muldi */
374 COSTS_N_INSNS (13), /* divsi */
375 COSTS_N_INSNS (13), /* divdi */
376 COSTS_N_INSNS (2), /* fp */
377 COSTS_N_INSNS (2), /* dmul */
378 COSTS_N_INSNS (17), /* sdiv */
379 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 380 256, /* cache line size */
5f732aba
DE
381 256, /* l1 cache */
382 1024, /* l2 cache */
0b11da67 383 0, /* streams */
8b897cfa
RS
384};
385
386/* Instruction costs on RS64A processors. */
387static const
388struct processor_costs rs64a_cost = {
06a67bdd
RS
389 COSTS_N_INSNS (20), /* mulsi */
390 COSTS_N_INSNS (12), /* mulsi_const */
391 COSTS_N_INSNS (8), /* mulsi_const9 */
392 COSTS_N_INSNS (34), /* muldi */
393 COSTS_N_INSNS (65), /* divsi */
394 COSTS_N_INSNS (67), /* divdi */
395 COSTS_N_INSNS (4), /* fp */
396 COSTS_N_INSNS (4), /* dmul */
397 COSTS_N_INSNS (31), /* sdiv */
398 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 399 128, /* cache line size */
5f732aba
DE
400 128, /* l1 cache */
401 2048, /* l2 cache */
0b11da67 402 1, /* streams */
8b897cfa
RS
403};
404
405/* Instruction costs on MPCCORE processors. */
406static const
407struct processor_costs mpccore_cost = {
06a67bdd
RS
408 COSTS_N_INSNS (2), /* mulsi */
409 COSTS_N_INSNS (2), /* mulsi_const */
410 COSTS_N_INSNS (2), /* mulsi_const9 */
411 COSTS_N_INSNS (2), /* muldi */
412 COSTS_N_INSNS (6), /* divsi */
413 COSTS_N_INSNS (6), /* divdi */
414 COSTS_N_INSNS (4), /* fp */
415 COSTS_N_INSNS (5), /* dmul */
416 COSTS_N_INSNS (10), /* sdiv */
417 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 418 32, /* cache line size */
5f732aba
DE
419 4, /* l1 cache */
420 16, /* l2 cache */
0b11da67 421 1, /* streams */
8b897cfa
RS
422};
423
424/* Instruction costs on PPC403 processors. */
425static const
426struct processor_costs ppc403_cost = {
06a67bdd
RS
427 COSTS_N_INSNS (4), /* mulsi */
428 COSTS_N_INSNS (4), /* mulsi_const */
429 COSTS_N_INSNS (4), /* mulsi_const9 */
430 COSTS_N_INSNS (4), /* muldi */
431 COSTS_N_INSNS (33), /* divsi */
432 COSTS_N_INSNS (33), /* divdi */
433 COSTS_N_INSNS (11), /* fp */
434 COSTS_N_INSNS (11), /* dmul */
435 COSTS_N_INSNS (11), /* sdiv */
436 COSTS_N_INSNS (11), /* ddiv */
0d158b6e 437 32, /* cache line size */
5f732aba
DE
438 4, /* l1 cache */
439 16, /* l2 cache */
0b11da67 440 1, /* streams */
8b897cfa
RS
441};
442
443/* Instruction costs on PPC405 processors. */
444static const
445struct processor_costs ppc405_cost = {
06a67bdd
RS
446 COSTS_N_INSNS (5), /* mulsi */
447 COSTS_N_INSNS (4), /* mulsi_const */
448 COSTS_N_INSNS (3), /* mulsi_const9 */
449 COSTS_N_INSNS (5), /* muldi */
450 COSTS_N_INSNS (35), /* divsi */
451 COSTS_N_INSNS (35), /* divdi */
452 COSTS_N_INSNS (11), /* fp */
453 COSTS_N_INSNS (11), /* dmul */
454 COSTS_N_INSNS (11), /* sdiv */
455 COSTS_N_INSNS (11), /* ddiv */
0d158b6e 456 32, /* cache line size */
5f732aba
DE
457 16, /* l1 cache */
458 128, /* l2 cache */
0b11da67 459 1, /* streams */
8b897cfa
RS
460};
461
462/* Instruction costs on PPC440 processors. */
463static const
464struct processor_costs ppc440_cost = {
06a67bdd
RS
465 COSTS_N_INSNS (3), /* mulsi */
466 COSTS_N_INSNS (2), /* mulsi_const */
467 COSTS_N_INSNS (2), /* mulsi_const9 */
468 COSTS_N_INSNS (3), /* muldi */
469 COSTS_N_INSNS (34), /* divsi */
470 COSTS_N_INSNS (34), /* divdi */
471 COSTS_N_INSNS (5), /* fp */
472 COSTS_N_INSNS (5), /* dmul */
473 COSTS_N_INSNS (19), /* sdiv */
474 COSTS_N_INSNS (33), /* ddiv */
0d158b6e 475 32, /* cache line size */
5f732aba
DE
476 32, /* l1 cache */
477 256, /* l2 cache */
0b11da67 478 1, /* streams */
8b897cfa
RS
479};
480
481/* Instruction costs on PPC601 processors. */
482static const
483struct processor_costs ppc601_cost = {
06a67bdd
RS
484 COSTS_N_INSNS (5), /* mulsi */
485 COSTS_N_INSNS (5), /* mulsi_const */
486 COSTS_N_INSNS (5), /* mulsi_const9 */
487 COSTS_N_INSNS (5), /* muldi */
488 COSTS_N_INSNS (36), /* divsi */
489 COSTS_N_INSNS (36), /* divdi */
490 COSTS_N_INSNS (4), /* fp */
491 COSTS_N_INSNS (5), /* dmul */
492 COSTS_N_INSNS (17), /* sdiv */
493 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 494 32, /* cache line size */
5f732aba
DE
495 32, /* l1 cache */
496 256, /* l2 cache */
0b11da67 497 1, /* streams */
8b897cfa
RS
498};
499
500/* Instruction costs on PPC603 processors. */
501static const
502struct processor_costs ppc603_cost = {
06a67bdd
RS
503 COSTS_N_INSNS (5), /* mulsi */
504 COSTS_N_INSNS (3), /* mulsi_const */
505 COSTS_N_INSNS (2), /* mulsi_const9 */
506 COSTS_N_INSNS (5), /* muldi */
507 COSTS_N_INSNS (37), /* divsi */
508 COSTS_N_INSNS (37), /* divdi */
509 COSTS_N_INSNS (3), /* fp */
510 COSTS_N_INSNS (4), /* dmul */
511 COSTS_N_INSNS (18), /* sdiv */
512 COSTS_N_INSNS (33), /* ddiv */
0d158b6e 513 32, /* cache line size */
5f732aba
DE
514 8, /* l1 cache */
515 64, /* l2 cache */
0b11da67 516 1, /* streams */
8b897cfa
RS
517};
518
519/* Instruction costs on PPC604 processors. */
520static const
521struct processor_costs ppc604_cost = {
06a67bdd
RS
522 COSTS_N_INSNS (4), /* mulsi */
523 COSTS_N_INSNS (4), /* mulsi_const */
524 COSTS_N_INSNS (4), /* mulsi_const9 */
525 COSTS_N_INSNS (4), /* muldi */
526 COSTS_N_INSNS (20), /* divsi */
527 COSTS_N_INSNS (20), /* divdi */
528 COSTS_N_INSNS (3), /* fp */
529 COSTS_N_INSNS (3), /* dmul */
530 COSTS_N_INSNS (18), /* sdiv */
531 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 532 32, /* cache line size */
5f732aba
DE
533 16, /* l1 cache */
534 512, /* l2 cache */
0b11da67 535 1, /* streams */
8b897cfa
RS
536};
537
538/* Instruction costs on PPC604e processors. */
539static const
540struct processor_costs ppc604e_cost = {
06a67bdd
RS
541 COSTS_N_INSNS (2), /* mulsi */
542 COSTS_N_INSNS (2), /* mulsi_const */
543 COSTS_N_INSNS (2), /* mulsi_const9 */
544 COSTS_N_INSNS (2), /* muldi */
545 COSTS_N_INSNS (20), /* divsi */
546 COSTS_N_INSNS (20), /* divdi */
547 COSTS_N_INSNS (3), /* fp */
548 COSTS_N_INSNS (3), /* dmul */
549 COSTS_N_INSNS (18), /* sdiv */
550 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 551 32, /* cache line size */
5f732aba
DE
552 32, /* l1 cache */
553 1024, /* l2 cache */
0b11da67 554 1, /* streams */
8b897cfa
RS
555};
556
f0517163 557/* Instruction costs on PPC620 processors. */
8b897cfa
RS
558static const
559struct processor_costs ppc620_cost = {
06a67bdd
RS
560 COSTS_N_INSNS (5), /* mulsi */
561 COSTS_N_INSNS (4), /* mulsi_const */
562 COSTS_N_INSNS (3), /* mulsi_const9 */
563 COSTS_N_INSNS (7), /* muldi */
564 COSTS_N_INSNS (21), /* divsi */
565 COSTS_N_INSNS (37), /* divdi */
566 COSTS_N_INSNS (3), /* fp */
567 COSTS_N_INSNS (3), /* dmul */
568 COSTS_N_INSNS (18), /* sdiv */
569 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 570 128, /* cache line size */
5f732aba
DE
571 32, /* l1 cache */
572 1024, /* l2 cache */
0b11da67 573 1, /* streams */
f0517163
RS
574};
575
576/* Instruction costs on PPC630 processors. */
577static const
578struct processor_costs ppc630_cost = {
06a67bdd
RS
579 COSTS_N_INSNS (5), /* mulsi */
580 COSTS_N_INSNS (4), /* mulsi_const */
581 COSTS_N_INSNS (3), /* mulsi_const9 */
582 COSTS_N_INSNS (7), /* muldi */
583 COSTS_N_INSNS (21), /* divsi */
584 COSTS_N_INSNS (37), /* divdi */
585 COSTS_N_INSNS (3), /* fp */
586 COSTS_N_INSNS (3), /* dmul */
587 COSTS_N_INSNS (17), /* sdiv */
588 COSTS_N_INSNS (21), /* ddiv */
0d158b6e 589 128, /* cache line size */
5f732aba
DE
590 64, /* l1 cache */
591 1024, /* l2 cache */
0b11da67 592 1, /* streams */
8b897cfa
RS
593};
594
d296e02e
AP
595/* Instruction costs on Cell processor. */
596/* COSTS_N_INSNS (1) ~ one add. */
597static const
598struct processor_costs ppccell_cost = {
599 COSTS_N_INSNS (9/2)+2, /* mulsi */
600 COSTS_N_INSNS (6/2), /* mulsi_const */
601 COSTS_N_INSNS (6/2), /* mulsi_const9 */
602 COSTS_N_INSNS (15/2)+2, /* muldi */
603 COSTS_N_INSNS (38/2), /* divsi */
604 COSTS_N_INSNS (70/2), /* divdi */
605 COSTS_N_INSNS (10/2), /* fp */
606 COSTS_N_INSNS (10/2), /* dmul */
607 COSTS_N_INSNS (74/2), /* sdiv */
608 COSTS_N_INSNS (74/2), /* ddiv */
0d158b6e 609 128, /* cache line size */
5f732aba
DE
610 32, /* l1 cache */
611 512, /* l2 cache */
612 6, /* streams */
d296e02e
AP
613};
614
8b897cfa
RS
615/* Instruction costs on PPC750 and PPC7400 processors. */
616static const
617struct processor_costs ppc750_cost = {
06a67bdd
RS
618 COSTS_N_INSNS (5), /* mulsi */
619 COSTS_N_INSNS (3), /* mulsi_const */
620 COSTS_N_INSNS (2), /* mulsi_const9 */
621 COSTS_N_INSNS (5), /* muldi */
622 COSTS_N_INSNS (17), /* divsi */
623 COSTS_N_INSNS (17), /* divdi */
624 COSTS_N_INSNS (3), /* fp */
625 COSTS_N_INSNS (3), /* dmul */
626 COSTS_N_INSNS (17), /* sdiv */
627 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 628 32, /* cache line size */
5f732aba
DE
629 32, /* l1 cache */
630 512, /* l2 cache */
0b11da67 631 1, /* streams */
8b897cfa
RS
632};
633
634/* Instruction costs on PPC7450 processors. */
635static const
636struct processor_costs ppc7450_cost = {
06a67bdd
RS
637 COSTS_N_INSNS (4), /* mulsi */
638 COSTS_N_INSNS (3), /* mulsi_const */
639 COSTS_N_INSNS (3), /* mulsi_const9 */
640 COSTS_N_INSNS (4), /* muldi */
641 COSTS_N_INSNS (23), /* divsi */
642 COSTS_N_INSNS (23), /* divdi */
643 COSTS_N_INSNS (5), /* fp */
644 COSTS_N_INSNS (5), /* dmul */
645 COSTS_N_INSNS (21), /* sdiv */
646 COSTS_N_INSNS (35), /* ddiv */
0d158b6e 647 32, /* cache line size */
5f732aba
DE
648 32, /* l1 cache */
649 1024, /* l2 cache */
0b11da67 650 1, /* streams */
8b897cfa 651};
a3170dc6 652
8b897cfa
RS
653/* Instruction costs on PPC8540 processors. */
654static const
655struct processor_costs ppc8540_cost = {
06a67bdd
RS
656 COSTS_N_INSNS (4), /* mulsi */
657 COSTS_N_INSNS (4), /* mulsi_const */
658 COSTS_N_INSNS (4), /* mulsi_const9 */
659 COSTS_N_INSNS (4), /* muldi */
660 COSTS_N_INSNS (19), /* divsi */
661 COSTS_N_INSNS (19), /* divdi */
662 COSTS_N_INSNS (4), /* fp */
663 COSTS_N_INSNS (4), /* dmul */
664 COSTS_N_INSNS (29), /* sdiv */
665 COSTS_N_INSNS (29), /* ddiv */
0d158b6e 666 32, /* cache line size */
5f732aba
DE
667 32, /* l1 cache */
668 256, /* l2 cache */
0b11da67 669 1, /* prefetch streams */
8b897cfa
RS
670};
671
fa41c305
EW
672/* Instruction costs on E300C2 and E300C3 cores. */
673static const
674struct processor_costs ppce300c2c3_cost = {
675 COSTS_N_INSNS (4), /* mulsi */
676 COSTS_N_INSNS (4), /* mulsi_const */
677 COSTS_N_INSNS (4), /* mulsi_const9 */
678 COSTS_N_INSNS (4), /* muldi */
679 COSTS_N_INSNS (19), /* divsi */
680 COSTS_N_INSNS (19), /* divdi */
681 COSTS_N_INSNS (3), /* fp */
682 COSTS_N_INSNS (4), /* dmul */
683 COSTS_N_INSNS (18), /* sdiv */
684 COSTS_N_INSNS (33), /* ddiv */
642639ce 685 32, /* cache line size */
a19b7d46
EW
686 16, /* l1 cache */
687 16, /* l2 cache */
642639ce 688 1, /* prefetch streams */
fa41c305
EW
689};
690
8b897cfa
RS
691/* Instruction costs on POWER4 and POWER5 processors. */
692static const
693struct processor_costs power4_cost = {
06a67bdd
RS
694 COSTS_N_INSNS (3), /* mulsi */
695 COSTS_N_INSNS (2), /* mulsi_const */
696 COSTS_N_INSNS (2), /* mulsi_const9 */
697 COSTS_N_INSNS (4), /* muldi */
698 COSTS_N_INSNS (18), /* divsi */
699 COSTS_N_INSNS (34), /* divdi */
700 COSTS_N_INSNS (3), /* fp */
701 COSTS_N_INSNS (3), /* dmul */
702 COSTS_N_INSNS (17), /* sdiv */
703 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 704 128, /* cache line size */
5f732aba
DE
705 32, /* l1 cache */
706 1024, /* l2 cache */
0b11da67 707 8, /* prefetch streams */
8b897cfa
RS
708};
709
44cd321e
PS
710/* Instruction costs on POWER6 processors. */
711static const
712struct processor_costs power6_cost = {
713 COSTS_N_INSNS (8), /* mulsi */
714 COSTS_N_INSNS (8), /* mulsi_const */
715 COSTS_N_INSNS (8), /* mulsi_const9 */
716 COSTS_N_INSNS (8), /* muldi */
717 COSTS_N_INSNS (22), /* divsi */
718 COSTS_N_INSNS (28), /* divdi */
719 COSTS_N_INSNS (3), /* fp */
720 COSTS_N_INSNS (3), /* dmul */
721 COSTS_N_INSNS (13), /* sdiv */
722 COSTS_N_INSNS (16), /* ddiv */
0d158b6e 723 128, /* cache line size */
5f732aba
DE
724 64, /* l1 cache */
725 2048, /* l2 cache */
0b11da67 726 16, /* prefetch streams */
44cd321e
PS
727};
728
8b897cfa 729\f
a2369ed3 730static bool rs6000_function_ok_for_sibcall (tree, tree);
3101faab 731static const char *rs6000_invalid_within_doloop (const_rtx);
a2369ed3 732static rtx rs6000_generate_compare (enum rtx_code);
a2369ed3
DJ
733static void rs6000_emit_stack_tie (void);
734static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
a2369ed3 735static bool spe_func_has_64bit_regs_p (void);
b20a9cca 736static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
d1d0c603 737 int, HOST_WIDE_INT);
a2369ed3
DJ
738static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
739static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
740static unsigned rs6000_hash_constant (rtx);
741static unsigned toc_hash_function (const void *);
742static int toc_hash_eq (const void *, const void *);
743static int constant_pool_expr_1 (rtx, int *, int *);
744static bool constant_pool_expr_p (rtx);
d04b6e6e 745static bool legitimate_small_data_p (enum machine_mode, rtx);
a2369ed3
DJ
746static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
747static struct machine_function * rs6000_init_machine_status (void);
748static bool rs6000_assemble_integer (rtx, unsigned int, int);
6d0a8091 749static bool no_global_regs_above (int);
5add3202 750#ifdef HAVE_GAS_HIDDEN
a2369ed3 751static void rs6000_assemble_visibility (tree, int);
5add3202 752#endif
a2369ed3
DJ
753static int rs6000_ra_ever_killed (void);
754static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
8bb418a3 755static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
3101faab 756static bool rs6000_ms_bitfield_layout_p (const_tree);
77ccdfed 757static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
76d2b81d 758static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
3101faab 759static const char *rs6000_mangle_type (const_tree);
b86fe7b4 760extern const struct attribute_spec rs6000_attribute_table[];
a2369ed3 761static void rs6000_set_default_type_attributes (tree);
52ff33d0 762static bool rs6000_reg_live_or_pic_offset_p (int);
a2369ed3
DJ
763static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
764static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
b20a9cca
AM
765static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
766 tree);
a2369ed3 767static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
586de218 768static bool rs6000_return_in_memory (const_tree, const_tree);
a2369ed3 769static void rs6000_file_start (void);
7c262518 770#if TARGET_ELF
9b580a0b 771static int rs6000_elf_reloc_rw_mask (void);
a2369ed3
DJ
772static void rs6000_elf_asm_out_constructor (rtx, int);
773static void rs6000_elf_asm_out_destructor (rtx, int);
1334b570 774static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
d6b5193b 775static void rs6000_elf_asm_init_sections (void);
d6b5193b
RS
776static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
777 unsigned HOST_WIDE_INT);
a56d7372 778static void rs6000_elf_encode_section_info (tree, rtx, int)
0e5dbd9b 779 ATTRIBUTE_UNUSED;
7c262518 780#endif
3101faab 781static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
e41b2a33
PB
782static void rs6000_alloc_sdmode_stack_slot (void);
783static void rs6000_instantiate_decls (void);
cbaaba19 784#if TARGET_XCOFF
0d5817b2 785static void rs6000_xcoff_asm_output_anchor (rtx);
a2369ed3 786static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
d6b5193b 787static void rs6000_xcoff_asm_init_sections (void);
9b580a0b 788static int rs6000_xcoff_reloc_rw_mask (void);
8210e4c4 789static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
d6b5193b 790static section *rs6000_xcoff_select_section (tree, int,
b20a9cca 791 unsigned HOST_WIDE_INT);
d6b5193b
RS
792static void rs6000_xcoff_unique_section (tree, int);
793static section *rs6000_xcoff_select_rtx_section
794 (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
a2369ed3
DJ
795static const char * rs6000_xcoff_strip_name_encoding (const char *);
796static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
797static void rs6000_xcoff_file_start (void);
798static void rs6000_xcoff_file_end (void);
f1384257 799#endif
a2369ed3
DJ
800static int rs6000_variable_issue (FILE *, int, rtx, int);
801static bool rs6000_rtx_costs (rtx, int, int, int *);
802static int rs6000_adjust_cost (rtx, rtx, rtx, int);
44cd321e 803static void rs6000_sched_init (FILE *, int, int);
cbe26ab8 804static bool is_microcoded_insn (rtx);
d296e02e 805static bool is_nonpipeline_insn (rtx);
cbe26ab8
DN
806static bool is_cracked_insn (rtx);
807static bool is_branch_slot_insn (rtx);
44cd321e 808static bool is_load_insn (rtx);
e3a0e200 809static rtx get_store_dest (rtx pat);
44cd321e
PS
810static bool is_store_insn (rtx);
811static bool set_to_load_agen (rtx,rtx);
982afe02 812static bool adjacent_mem_locations (rtx,rtx);
a2369ed3
DJ
813static int rs6000_adjust_priority (rtx, int);
814static int rs6000_issue_rate (void);
b198261f 815static bool rs6000_is_costly_dependence (dep_t, int, int);
cbe26ab8
DN
816static rtx get_next_active_insn (rtx, rtx);
817static bool insn_terminates_group_p (rtx , enum group_termination);
44cd321e
PS
818static bool insn_must_be_first_in_group (rtx);
819static bool insn_must_be_last_in_group (rtx);
cbe26ab8
DN
820static bool is_costly_group (rtx *, rtx);
821static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
822static int redefine_groups (FILE *, int, rtx, rtx);
823static int pad_groups (FILE *, int, rtx, rtx);
824static void rs6000_sched_finish (FILE *, int);
44cd321e
PS
825static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
826static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
a2369ed3 827static int rs6000_use_sched_lookahead (void);
d296e02e 828static int rs6000_use_sched_lookahead_guard (rtx);
9c78b944 829static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
7ccf35ed 830static tree rs6000_builtin_mask_for_load (void);
89d67cca
DN
831static tree rs6000_builtin_mul_widen_even (tree);
832static tree rs6000_builtin_mul_widen_odd (tree);
f57d17f1 833static tree rs6000_builtin_conversion (enum tree_code, tree);
a2369ed3 834
58646b77 835static void def_builtin (int, const char *, tree, int);
3101faab 836static bool rs6000_vector_alignment_reachable (const_tree, bool);
a2369ed3
DJ
837static void rs6000_init_builtins (void);
838static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
839static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
840static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
841static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
842static void altivec_init_builtins (void);
843static void rs6000_common_init_builtins (void);
c15c90bb 844static void rs6000_init_libfuncs (void);
a2369ed3 845
96038623
DE
846static void paired_init_builtins (void);
847static rtx paired_expand_builtin (tree, rtx, bool *);
848static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
849static rtx paired_expand_stv_builtin (enum insn_code, tree);
850static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);
851
b20a9cca
AM
852static void enable_mask_for_builtins (struct builtin_description *, int,
853 enum rs6000_builtins,
854 enum rs6000_builtins);
7c62e993 855static tree build_opaque_vector_type (tree, int);
a2369ed3
DJ
856static void spe_init_builtins (void);
857static rtx spe_expand_builtin (tree, rtx, bool *);
61bea3b0 858static rtx spe_expand_stv_builtin (enum insn_code, tree);
a2369ed3
DJ
859static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
860static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
861static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
d1d0c603
JJ
862static rs6000_stack_t *rs6000_stack_info (void);
863static void debug_stack_info (rs6000_stack_t *);
a2369ed3
DJ
864
865static rtx altivec_expand_builtin (tree, rtx, bool *);
866static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
867static rtx altivec_expand_st_builtin (tree, rtx, bool *);
868static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
869static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
f676971a 870static rtx altivec_expand_predicate_builtin (enum insn_code,
c4ad648e 871 const char *, tree, rtx);
b4a62fa0 872static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
a2369ed3 873static rtx altivec_expand_stv_builtin (enum insn_code, tree);
7a4eca66
DE
874static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
875static rtx altivec_expand_vec_set_builtin (tree);
876static rtx altivec_expand_vec_ext_builtin (tree, rtx);
877static int get_element_number (tree, tree);
78f5898b 878static bool rs6000_handle_option (size_t, const char *, int);
a2369ed3 879static void rs6000_parse_tls_size_option (void);
5da702b1 880static void rs6000_parse_yes_no_option (const char *, const char *, int *);
a2369ed3
DJ
881static int first_altivec_reg_to_save (void);
882static unsigned int compute_vrsave_mask (void);
9390387d 883static void compute_save_world_info (rs6000_stack_t *info_ptr);
a2369ed3
DJ
884static void is_altivec_return_reg (rtx, void *);
885static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
886int easy_vector_constant (rtx, enum machine_mode);
3101faab 887static bool rs6000_is_opaque_type (const_tree);
a2369ed3 888static rtx rs6000_dwarf_register_span (rtx);
37ea0b7e 889static void rs6000_init_dwarf_reg_sizes_extra (tree);
a2369ed3 890static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
fdbe66f2 891static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
a2369ed3
DJ
892static rtx rs6000_tls_get_addr (void);
893static rtx rs6000_got_sym (void);
9390387d 894static int rs6000_tls_symbol_ref_1 (rtx *, void *);
a2369ed3
DJ
895static const char *rs6000_get_some_local_dynamic_name (void);
896static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
ded9bf77 897static rtx rs6000_complex_function_value (enum machine_mode);
b20a9cca 898static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
a2369ed3 899 enum machine_mode, tree);
0b5383eb
DJ
900static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
901 HOST_WIDE_INT);
902static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
903 tree, HOST_WIDE_INT);
904static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
905 HOST_WIDE_INT,
906 rtx[], int *);
907static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
586de218
KG
908 const_tree, HOST_WIDE_INT,
909 rtx[], int *);
910static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
ec6376ab 911static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
b1917422 912static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
c6e8c921
GK
913static void setup_incoming_varargs (CUMULATIVE_ARGS *,
914 enum machine_mode, tree,
915 int *, int);
8cd5a4e0 916static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
586de218 917 const_tree, bool);
78a52f11
RH
918static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
919 tree, bool);
3101faab 920static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
efdba735
SH
921#if TARGET_MACHO
922static void macho_branch_islands (void);
efdba735
SH
923static int no_previous_def (tree function_name);
924static tree get_prev_label (tree function_name);
c4e18b1c 925static void rs6000_darwin_file_start (void);
efdba735
SH
926#endif
927
c35d187f 928static tree rs6000_build_builtin_va_list (void);
d7bd8aeb 929static void rs6000_va_start (tree, rtx);
23a60a04 930static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
586de218 931static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
00b79d54 932static bool rs6000_scalar_mode_supported_p (enum machine_mode);
f676971a 933static bool rs6000_vector_mode_supported_p (enum machine_mode);
94ff898d 934static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
21213b4c 935 enum machine_mode);
94ff898d 936static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
21213b4c
DP
937 enum machine_mode);
938static int get_vsel_insn (enum machine_mode);
939static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
3aebbe5f 940static tree rs6000_stack_protect_fail (void);
21213b4c
DP
941
942const int INSN_NOT_AVAILABLE = -1;
93f90be6
FJ
943static enum machine_mode rs6000_eh_return_filter_mode (void);
944
17211ab5
GK
945/* Hash table stuff for keeping track of TOC entries. */
946
947struct toc_hash_struct GTY(())
948{
949 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
950 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
951 rtx key;
952 enum machine_mode key_mode;
953 int labelno;
954};
955
956static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
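/* Illustrative sketch, not in the original source: TOC entries are
   interned through this table using the toc_hash_function / toc_hash_eq
   callbacks declared earlier, so two constants with equal key and
   key_mode end up with the same labelno and therefore share a single
   TOC entry.  */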
c81bebd7
MM
957\f
958/* Default register names. */
959char rs6000_reg_names[][8] =
960{
802a0058
MM
961 "0", "1", "2", "3", "4", "5", "6", "7",
962 "8", "9", "10", "11", "12", "13", "14", "15",
963 "16", "17", "18", "19", "20", "21", "22", "23",
964 "24", "25", "26", "27", "28", "29", "30", "31",
965 "0", "1", "2", "3", "4", "5", "6", "7",
966 "8", "9", "10", "11", "12", "13", "14", "15",
967 "16", "17", "18", "19", "20", "21", "22", "23",
968 "24", "25", "26", "27", "28", "29", "30", "31",
969 "mq", "lr", "ctr","ap",
970 "0", "1", "2", "3", "4", "5", "6", "7",
0ac081f6
AH
971 "xer",
972 /* AltiVec registers. */
0cd5e3a1
AH
973 "0", "1", "2", "3", "4", "5", "6", "7",
974 "8", "9", "10", "11", "12", "13", "14", "15",
975 "16", "17", "18", "19", "20", "21", "22", "23",
976 "24", "25", "26", "27", "28", "29", "30", "31",
59a4c851
AH
977 "vrsave", "vscr",
978 /* SPE registers. */
7d5175e1
JJ
979 "spe_acc", "spefscr",
980 /* Soft frame pointer. */
981 "sfp"
c81bebd7
MM
982};
983
984#ifdef TARGET_REGNAMES
8b60264b 985static const char alt_reg_names[][8] =
c81bebd7 986{
802a0058
MM
987 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
988 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
989 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
990 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
991 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
992 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
993 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
994 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
995 "mq", "lr", "ctr", "ap",
996 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
0ac081f6 997 "xer",
59a4c851 998 /* AltiVec registers. */
0ac081f6 999 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
59a4c851
AH
1000 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
1001 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
1002 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
1003 "vrsave", "vscr",
1004 /* SPE registers. */
7d5175e1
JJ
1005 "spe_acc", "spefscr",
1006 /* Soft frame pointer. */
1007 "sfp"
c81bebd7
MM
1008};
1009#endif
9878760c 1010\f
daf11973
MM
1011#ifndef MASK_STRICT_ALIGN
1012#define MASK_STRICT_ALIGN 0
1013#endif
ffcfcb5f
AM
1014#ifndef TARGET_PROFILE_KERNEL
1015#define TARGET_PROFILE_KERNEL 0
1016#endif
3961e8fe
RH
1017
1018/* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
1019#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
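/* Illustrative expansion, not in the original source:

     ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO)     == 0x80000000
     ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 1) == 0x40000000

   i.e. %v0 occupies the most significant VRSAVE bit, as the comment
   above says, and each later register shifts one bit to the right.  */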
672a6f42
NB
1020\f
1021/* Initialize the GCC target structure. */
91d231cb
JM
1022#undef TARGET_ATTRIBUTE_TABLE
1023#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
a5c76ee6
ZW
1024#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
1025#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
daf11973 1026
301d03af
RS
1027#undef TARGET_ASM_ALIGNED_DI_OP
1028#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
1029
1030/* Default unaligned ops are only provided for ELF. Find the ops needed
1031 for non-ELF systems. */
1032#ifndef OBJECT_FORMAT_ELF
cbaaba19 1033#if TARGET_XCOFF
ae6c1efd 1034/* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
301d03af
RS
1035 64-bit targets. */
1036#undef TARGET_ASM_UNALIGNED_HI_OP
1037#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
1038#undef TARGET_ASM_UNALIGNED_SI_OP
1039#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
1040#undef TARGET_ASM_UNALIGNED_DI_OP
1041#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
1042#else
1043/* For Darwin. */
1044#undef TARGET_ASM_UNALIGNED_HI_OP
1045#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
1046#undef TARGET_ASM_UNALIGNED_SI_OP
1047#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
49bd1d27
SS
1048#undef TARGET_ASM_UNALIGNED_DI_OP
1049#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
1050#undef TARGET_ASM_ALIGNED_DI_OP
1051#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
301d03af
RS
1052#endif
1053#endif
1054
1055/* This hook deals with fixups for relocatable code and DI-mode objects
1056 in 64-bit code. */
1057#undef TARGET_ASM_INTEGER
1058#define TARGET_ASM_INTEGER rs6000_assemble_integer
1059
93638d7a
AM
1060#ifdef HAVE_GAS_HIDDEN
1061#undef TARGET_ASM_ASSEMBLE_VISIBILITY
1062#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
1063#endif
1064
c4501e62
JJ
1065#undef TARGET_HAVE_TLS
1066#define TARGET_HAVE_TLS HAVE_AS_TLS
1067
1068#undef TARGET_CANNOT_FORCE_CONST_MEM
a7e0b075 1069#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
c4501e62 1070
08c148a8
NB
1071#undef TARGET_ASM_FUNCTION_PROLOGUE
1072#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
1073#undef TARGET_ASM_FUNCTION_EPILOGUE
1074#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
1075
b54cf83a
DE
1076#undef TARGET_SCHED_VARIABLE_ISSUE
1077#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
1078
c237e94a
ZW
1079#undef TARGET_SCHED_ISSUE_RATE
1080#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
1081#undef TARGET_SCHED_ADJUST_COST
1082#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
1083#undef TARGET_SCHED_ADJUST_PRIORITY
1084#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
f676971a 1085#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
569fa502 1086#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
44cd321e
PS
1087#undef TARGET_SCHED_INIT
1088#define TARGET_SCHED_INIT rs6000_sched_init
cbe26ab8
DN
1089#undef TARGET_SCHED_FINISH
1090#define TARGET_SCHED_FINISH rs6000_sched_finish
44cd321e
PS
1091#undef TARGET_SCHED_REORDER
1092#define TARGET_SCHED_REORDER rs6000_sched_reorder
1093#undef TARGET_SCHED_REORDER2
1094#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2
c237e94a 1095
be12c2b0
VM
1096#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
1097#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
1098
d296e02e
AP
1099#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
1100#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard
1101
7ccf35ed
DN
1102#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
1103#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
89d67cca
DN
1104#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
1105#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
1106#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
1107#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
f57d17f1
TM
1108#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
1109#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
7ccf35ed 1110
5b900a4c
DN
1111#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
1112#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable
1113
0ac081f6
AH
1114#undef TARGET_INIT_BUILTINS
1115#define TARGET_INIT_BUILTINS rs6000_init_builtins
1116
1117#undef TARGET_EXPAND_BUILTIN
1118#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
1119
608063c3
JB
1120#undef TARGET_MANGLE_TYPE
1121#define TARGET_MANGLE_TYPE rs6000_mangle_type
f18eca82 1122
c15c90bb
ZW
1123#undef TARGET_INIT_LIBFUNCS
1124#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
1125
f1384257 1126#if TARGET_MACHO
0e5dbd9b 1127#undef TARGET_BINDS_LOCAL_P
31920d83 1128#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
f1384257 1129#endif
0e5dbd9b 1130
77ccdfed
EC
1131#undef TARGET_MS_BITFIELD_LAYOUT_P
1132#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p
1133
3961e8fe
RH
1134#undef TARGET_ASM_OUTPUT_MI_THUNK
1135#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
1136
3961e8fe 1137#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3101faab 1138#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
00b960c7 1139
4977bab6
ZW
1140#undef TARGET_FUNCTION_OK_FOR_SIBCALL
1141#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
1142
2e3f0db6
DJ
1143#undef TARGET_INVALID_WITHIN_DOLOOP
1144#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
9419649c 1145
3c50106f
RH
1146#undef TARGET_RTX_COSTS
1147#define TARGET_RTX_COSTS rs6000_rtx_costs
dcefdf67
RH
1148#undef TARGET_ADDRESS_COST
1149#define TARGET_ADDRESS_COST hook_int_rtx_0
3c50106f 1150
c8e4f0e9 1151#undef TARGET_VECTOR_OPAQUE_P
58646b77 1152#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
62e1dfcf 1153
96714395
AH
1154#undef TARGET_DWARF_REGISTER_SPAN
1155#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
1156
37ea0b7e
JM
1157#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
1158#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra
1159
c6e8c921
GK
1160/* On rs6000, function arguments are promoted, as are function return
1161 values. */
1162#undef TARGET_PROMOTE_FUNCTION_ARGS
586de218 1163#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
c6e8c921 1164#undef TARGET_PROMOTE_FUNCTION_RETURN
586de218 1165#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
c6e8c921 1166
c6e8c921
GK
1167#undef TARGET_RETURN_IN_MEMORY
1168#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
1169
1170#undef TARGET_SETUP_INCOMING_VARARGS
1171#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
1172
1173/* Always strict argument naming on rs6000. */
1174#undef TARGET_STRICT_ARGUMENT_NAMING
1175#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
1176#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
1177#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
42ba5130 1178#undef TARGET_SPLIT_COMPLEX_ARG
3101faab 1179#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
fe984136
RH
1180#undef TARGET_MUST_PASS_IN_STACK
1181#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
8cd5a4e0
RH
1182#undef TARGET_PASS_BY_REFERENCE
1183#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
78a52f11
RH
1184#undef TARGET_ARG_PARTIAL_BYTES
1185#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
c6e8c921 1186
c35d187f
RH
1187#undef TARGET_BUILD_BUILTIN_VA_LIST
1188#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
1189
d7bd8aeb
JJ
1190#undef TARGET_EXPAND_BUILTIN_VA_START
1191#define TARGET_EXPAND_BUILTIN_VA_START rs6000_va_start
1192
cd3ce9b4
JM
1193#undef TARGET_GIMPLIFY_VA_ARG_EXPR
1194#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
1195
93f90be6
FJ
1196#undef TARGET_EH_RETURN_FILTER_MODE
1197#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
1198
00b79d54
BE
1199#undef TARGET_SCALAR_MODE_SUPPORTED_P
1200#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p
1201
f676971a
EC
1202#undef TARGET_VECTOR_MODE_SUPPORTED_P
1203#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
1204
4d3e6fae
FJ
1205#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
1206#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
1207
78f5898b
AH
1208#undef TARGET_HANDLE_OPTION
1209#define TARGET_HANDLE_OPTION rs6000_handle_option
1210
1211#undef TARGET_DEFAULT_TARGET_FLAGS
1212#define TARGET_DEFAULT_TARGET_FLAGS \
716019c0 1213 (TARGET_DEFAULT)
78f5898b 1214
3aebbe5f
JJ
1215#undef TARGET_STACK_PROTECT_FAIL
1216#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
1217
445cf5eb
JM
1218/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
1219 The PowerPC architecture requires only weak consistency among
1220 processors--that is, memory accesses between processors need not be
1221 sequentially consistent and memory accesses among processors can occur
1222 in any order. The ability to order memory accesses weakly provides
1223 opportunities for more efficient use of the system bus. Unless a
1224 dependency exists, the 604e allows read operations to precede store
1225 operations. */
1226#undef TARGET_RELAXED_ORDERING
1227#define TARGET_RELAXED_ORDERING true
1228
fdbe66f2
EB
1229#ifdef HAVE_AS_TLS
1230#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1231#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1232#endif
1233
aacd3885
RS
1234/* Use a 32-bit anchor range. This leads to sequences like:
1235
1236 addis tmp,anchor,high
1237 add dest,tmp,low
1238
1239 where tmp itself acts as an anchor, and can be shared between
1240 accesses to the same 64k page. */
1241#undef TARGET_MIN_ANCHOR_OFFSET
1242#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
1243#undef TARGET_MAX_ANCHOR_OFFSET
1244#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
1245#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1246#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p
1247
9c78b944
DE
1248#undef TARGET_BUILTIN_RECIPROCAL
1249#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal
1250
e41b2a33
PB
1251#undef TARGET_EXPAND_TO_RTL_HOOK
1252#define TARGET_EXPAND_TO_RTL_HOOK rs6000_alloc_sdmode_stack_slot
1253
1254#undef TARGET_INSTANTIATE_DECLS
1255#define TARGET_INSTANTIATE_DECLS rs6000_instantiate_decls
1256
f6897b10 1257struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 1258\f
0d1fbc8c
AH
1259
1260/* Value is 1 if hard register REGNO can hold a value of machine-mode
1261 MODE. */
1262static int
1263rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1264{
1265 /* The GPRs can hold any mode, but values bigger than one register
1266 cannot go past R31. */
1267 if (INT_REGNO_P (regno))
1268 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1269
a5a97921 1270 /* The float registers can only hold floating modes and DImode.
7393f7f8 1271 This excludes the 32-bit decimal float mode for now. */
0d1fbc8c
AH
1272 if (FP_REGNO_P (regno))
1273 return
96038623 1274 ((SCALAR_FLOAT_MODE_P (mode)
c092b045 1275 && (mode != TDmode || (regno % 2) == 0)
0d1fbc8c
AH
1276 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1277 || (GET_MODE_CLASS (mode) == MODE_INT
96038623
DE
1278 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
1279 || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
1280 && PAIRED_VECTOR_MODE (mode)));
0d1fbc8c
AH
1281
1282 /* The CR register can only hold CC modes. */
1283 if (CR_REGNO_P (regno))
1284 return GET_MODE_CLASS (mode) == MODE_CC;
1285
1286 if (XER_REGNO_P (regno))
1287 return mode == PSImode;
1288
 1289 /* AltiVec modes go only in AltiVec registers. */
1290 if (ALTIVEC_REGNO_P (regno))
1291 return ALTIVEC_VECTOR_MODE (mode);
1292
1293 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1294 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1295 return 1;
1296
 1297 /* We cannot put TImode anywhere except general registers, and it must be
 1298 able to fit within the register set. */
1299
1300 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1301}
1302
1303/* Initialize rs6000_hard_regno_mode_ok_p table. */
1304static void
1305rs6000_init_hard_regno_mode_ok (void)
1306{
1307 int r, m;
1308
1309 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1310 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1311 if (rs6000_hard_regno_mode_ok (r, m))
1312 rs6000_hard_regno_mode_ok_p[m][r] = true;
1313}
1314
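/* Illustrative sketch (assumption, not in the original source): the
   HARD_REGNO_MODE_OK target macro in rs6000.h is expected to reduce to a
   lookup in the table filled in above, along the lines of

     #define HARD_REGNO_MODE_OK(REGNO, MODE) \
       rs6000_hard_regno_mode_ok_p[(int) (MODE)][(REGNO)]

   making the legality test a constant-time array reference once
   rs6000_init_hard_regno_mode_ok has run.  */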
e4cad568
GK
1315#if TARGET_MACHO
1316/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1317
1318static void
1319darwin_rs6000_override_options (void)
1320{
1321 /* The Darwin ABI always includes AltiVec, can't be (validly) turned
1322 off. */
1323 rs6000_altivec_abi = 1;
1324 TARGET_ALTIVEC_VRSAVE = 1;
1325 if (DEFAULT_ABI == ABI_DARWIN)
1326 {
1327 if (MACHO_DYNAMIC_NO_PIC_P)
1328 {
1329 if (flag_pic)
1330 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1331 flag_pic = 0;
1332 }
1333 else if (flag_pic == 1)
1334 {
1335 flag_pic = 2;
1336 }
1337 }
1338 if (TARGET_64BIT && ! TARGET_POWERPC64)
1339 {
1340 target_flags |= MASK_POWERPC64;
1341 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1342 }
1343 if (flag_mkernel)
1344 {
1345 rs6000_default_long_calls = 1;
1346 target_flags |= MASK_SOFT_FLOAT;
1347 }
1348
1349 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
1350 Altivec. */
1351 if (!flag_mkernel && !flag_apple_kext
1352 && TARGET_64BIT
1353 && ! (target_flags_explicit & MASK_ALTIVEC))
1354 target_flags |= MASK_ALTIVEC;
1355
1356 /* Unless the user (not the configurer) has explicitly overridden
1357 it with -mcpu=G3 or -mno-altivec, then 10.5+ targets default to
 1358 G4 unless targeting the kernel. */
1359 if (!flag_mkernel
1360 && !flag_apple_kext
1361 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1362 && ! (target_flags_explicit & MASK_ALTIVEC)
1363 && ! rs6000_select[1].string)
1364 {
1365 target_flags |= MASK_ALTIVEC;
1366 }
1367}
1368#endif
1369
c1e55850
GK
1370/* If not otherwise specified by a target, make 'long double' equivalent to
1371 'double'. */
1372
1373#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1374#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1375#endif
1376
5248c961
RK
1377/* Override command line options. Mostly we process the processor
1378 type and sometimes adjust other TARGET_ options. */
1379
1380void
d779d0dc 1381rs6000_override_options (const char *default_cpu)
5248c961 1382{
c4d38ccb 1383 size_t i, j;
8e3f41e7 1384 struct rs6000_cpu_select *ptr;
66188a7e 1385 int set_masks;
5248c961 1386
66188a7e 1387 /* Simplifications for entries below. */
85638c0d 1388
66188a7e
GK
1389 enum {
1390 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1391 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1392 };
85638c0d 1393
66188a7e
GK
1394 /* This table occasionally claims that a processor does not support
1395 a particular feature even though it does, but the feature is slower
1396 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1397 complete description of the processor's support.
66188a7e
GK
1398
1399 Please keep this list in order, and don't forget to update the
1400 documentation in invoke.texi when adding a new processor or
1401 flag. */
5248c961
RK
1402 static struct ptt
1403 {
8b60264b
KG
1404 const char *const name; /* Canonical processor name. */
1405 const enum processor_type processor; /* Processor type enum value. */
1406 const int target_enable; /* Target flags to enable. */
8b60264b 1407 } const processor_target_table[]
66188a7e 1408 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1409 {"403", PROCESSOR_PPC403,
66188a7e 1410 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1411 {"405", PROCESSOR_PPC405,
716019c0
JM
1412 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1413 {"405fp", PROCESSOR_PPC405,
1414 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1415 {"440", PROCESSOR_PPC440,
716019c0
JM
1416 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1417 {"440fp", PROCESSOR_PPC440,
1418 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1419 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1420 {"601", PROCESSOR_PPC601,
66188a7e
GK
1421 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1422 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1423 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1424 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1425 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1426 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1427 {"620", PROCESSOR_PPC620,
1428 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1429 {"630", PROCESSOR_PPC630,
1430 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1431 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1432 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1433 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1434 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1435 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1436 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1437 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1438 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1439 /* 8548 has a dummy entry for now. */
a45bce6e 1440 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
fa41c305
EW
1441 {"e300c2", PROCESSOR_PPCE300C2, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1442 {"e300c3", PROCESSOR_PPCE300C3, POWERPC_BASE_MASK},
66188a7e 1443 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1444 {"970", PROCESSOR_POWER4,
66188a7e 1445 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1446 {"cell", PROCESSOR_CELL,
1447 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1448 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1449 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1450 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1451 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1452 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1453 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1454 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1455 {"power2", PROCESSOR_POWER,
1456 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1457 {"power3", PROCESSOR_PPC630,
1458 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1459 {"power4", PROCESSOR_POWER4,
fc091c8e 1460 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
ec507f2d 1461 {"power5", PROCESSOR_POWER5,
432218ba
DE
1462 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1463 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7
DE
1464 {"power5+", PROCESSOR_POWER5,
1465 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1466 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1467 {"power6", PROCESSOR_POWER6,
e118597e 1468 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1469 | MASK_FPRND | MASK_CMPB | MASK_DFP },
44cd321e
PS
1470 {"power6x", PROCESSOR_POWER6,
1471 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1472 | MASK_FPRND | MASK_CMPB | MASK_MFPGPR | MASK_DFP },
66188a7e
GK
1473 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1474 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1475 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1476 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1477 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1478 {"rios2", PROCESSOR_RIOS2,
1479 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1480 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1481 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1482 {"rs64", PROCESSOR_RS64A,
1483 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1484 };
5248c961 1485
ca7558fc 1486 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1487
66188a7e
GK
1488 /* Some OSs don't support saving the high part of 64-bit registers on
1489 context switch. Other OSs don't support saving Altivec registers.
1490 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1491 settings; if the user wants either, the user must explicitly specify
1492 them and we won't interfere with the user's specification. */
1493
1494 enum {
1495 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1496 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1497 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1498 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1499 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1500 };
0d1fbc8c
AH
1501
1502 rs6000_init_hard_regno_mode_ok ();
1503
c4ad648e 1504 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1505#ifdef OS_MISSING_POWERPC64
1506 if (OS_MISSING_POWERPC64)
1507 set_masks &= ~MASK_POWERPC64;
1508#endif
1509#ifdef OS_MISSING_ALTIVEC
1510 if (OS_MISSING_ALTIVEC)
1511 set_masks &= ~MASK_ALTIVEC;
1512#endif
1513
768875a8
AM
1514 /* Don't let the processor default override flags given explicitly by the user. */
1515 set_masks &= ~target_flags_explicit;
957211c3 1516
a4f6c312 1517 /* Identify the processor type. */
8e3f41e7 1518 rs6000_select[0].string = default_cpu;
3cb999d8 1519 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1520
b6a1cbae 1521 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1522 {
8e3f41e7
MM
1523 ptr = &rs6000_select[i];
1524 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1525 {
8e3f41e7
MM
1526 for (j = 0; j < ptt_size; j++)
1527 if (! strcmp (ptr->string, processor_target_table[j].name))
1528 {
1529 if (ptr->set_tune_p)
1530 rs6000_cpu = processor_target_table[j].processor;
1531
1532 if (ptr->set_arch_p)
1533 {
66188a7e
GK
1534 target_flags &= ~set_masks;
1535 target_flags |= (processor_target_table[j].target_enable
1536 & set_masks);
8e3f41e7
MM
1537 }
1538 break;
1539 }
1540
4406229e 1541 if (j == ptt_size)
8e3f41e7 1542 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1543 }
1544 }
8a61d227 1545
993f19a8 1546 if (TARGET_E500)
a3170dc6
AH
1547 rs6000_isel = 1;
1548
fa41c305
EW
1549 if (rs6000_cpu == PROCESSOR_PPCE300C2 || rs6000_cpu == PROCESSOR_PPCE300C3)
1550 {
1551 if (TARGET_ALTIVEC)
1552 error ("AltiVec not supported in this target");
1553 if (TARGET_SPE)
1554 error ("Spe not supported in this target");
1555 }
1556
dff9f1b6
DE
1557 /* If we are optimizing big endian systems for space, use the load/store
1558 multiple and string instructions. */
ef792183 1559 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1560 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1561
a4f6c312
SS
1562 /* Don't allow -mmultiple or -mstring on little endian systems
1563 unless the cpu is a 750, because the hardware doesn't support the
1564 instructions used in little endian mode, and causes an alignment
1565 trap. The 750 does not cause an alignment trap (except when the
1566 target is unaligned). */
bef84347 1567
b21fb038 1568 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1569 {
1570 if (TARGET_MULTIPLE)
1571 {
1572 target_flags &= ~MASK_MULTIPLE;
b21fb038 1573 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1574 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1575 }
1576
1577 if (TARGET_STRING)
1578 {
1579 target_flags &= ~MASK_STRING;
b21fb038 1580 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1581 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1582 }
1583 }
3933e0e1 1584
38c1f2d7
MM
1585 /* Set debug flags */
1586 if (rs6000_debug_name)
1587 {
bfc79d3b 1588 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1589 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1590 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1591 rs6000_debug_stack = 1;
bfc79d3b 1592 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1593 rs6000_debug_arg = 1;
1594 else
c725bd79 1595 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1596 }
1597
57ac7be9
AM
1598 if (rs6000_traceback_name)
1599 {
1600 if (! strncmp (rs6000_traceback_name, "full", 4))
1601 rs6000_traceback = traceback_full;
1602 else if (! strncmp (rs6000_traceback_name, "part", 4))
1603 rs6000_traceback = traceback_part;
1604 else if (! strncmp (rs6000_traceback_name, "no", 2))
1605 rs6000_traceback = traceback_none;
1606 else
9e637a26 1607 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1608 rs6000_traceback_name);
1609 }
1610
78f5898b
AH
1611 if (!rs6000_explicit_options.long_double)
1612 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1613
602ea4d3 1614#ifndef POWERPC_LINUX
d3603e8c 1615 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1616 rs6000_ieeequad = 1;
1617#endif
1618
0db747be
DE
1619 /* Enable Altivec ABI for AIX -maltivec. */
1620 if (TARGET_XCOFF && TARGET_ALTIVEC)
1621 rs6000_altivec_abi = 1;
1622
a2db2771
JJ
1623 /* The AltiVec ABI is the default for PowerPC-64 GNU/Linux. For
1624 PowerPC-32 GNU/Linux, -maltivec implies the AltiVec ABI. It can
1625 be explicitly overridden in either case. */
1626 if (TARGET_ELF)
6d0ef01e 1627 {
a2db2771
JJ
1628 if (!rs6000_explicit_options.altivec_abi
1629 && (TARGET_64BIT || TARGET_ALTIVEC))
1630 rs6000_altivec_abi = 1;
1631
1632 /* Enable VRSAVE for AltiVec ABI, unless explicitly overridden. */
1633 if (!rs6000_explicit_options.vrsave)
1634 TARGET_ALTIVEC_VRSAVE = rs6000_altivec_abi;
6d0ef01e
HP
1635 }
1636
594a51fe
SS
1637 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1638 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1639 {
1640 rs6000_darwin64_abi = 1;
9c7956fd 1641#if TARGET_MACHO
6ac49599 1642 darwin_one_byte_bool = 1;
9c7956fd 1643#endif
d9168963
SS
1644 /* Default to natural alignment, for better performance. */
1645 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1646 }
1647
194c524a
DE
1648 /* Place FP constants in the constant pool instead of TOC
1649 if section anchors are enabled. */
1650 if (flag_section_anchors)
1651 TARGET_NO_FP_IN_TOC = 1;
1652
c4501e62
JJ
1653 /* Handle -mtls-size option. */
1654 rs6000_parse_tls_size_option ();
1655
a7ae18e2
AH
1656#ifdef SUBTARGET_OVERRIDE_OPTIONS
1657 SUBTARGET_OVERRIDE_OPTIONS;
1658#endif
1659#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1660 SUBSUBTARGET_OVERRIDE_OPTIONS;
1661#endif
4d4cbc0e
AH
1662#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1663 SUB3TARGET_OVERRIDE_OPTIONS;
1664#endif
a7ae18e2 1665
5da702b1
AH
1666 if (TARGET_E500)
1667 {
1668 /* The e500 does not have string instructions, and we set
1669 MASK_STRING above when optimizing for size. */
1670 if ((target_flags & MASK_STRING) != 0)
1671 target_flags = target_flags & ~MASK_STRING;
1672 }
1673 else if (rs6000_select[1].string != NULL)
1674 {
1675 /* For the powerpc-eabispe configuration, we set all these by
1676 default, so let's unset them if we manually set another
1677 CPU that is not the E500. */
a2db2771 1678 if (!rs6000_explicit_options.spe_abi)
5da702b1 1679 rs6000_spe_abi = 0;
78f5898b 1680 if (!rs6000_explicit_options.spe)
5da702b1 1681 rs6000_spe = 0;
78f5898b 1682 if (!rs6000_explicit_options.float_gprs)
5da702b1 1683 rs6000_float_gprs = 0;
78f5898b 1684 if (!rs6000_explicit_options.isel)
5da702b1
AH
1685 rs6000_isel = 0;
1686 }
b5044283 1687
eca0d5e8
JM
1688 /* Detect invalid option combinations with E500. */
1689 CHECK_E500_OPTIONS;
1690
ec507f2d 1691 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1692 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1693 && rs6000_cpu != PROCESSOR_POWER6
1694 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1695 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1696 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1697 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1698 || rs6000_cpu == PROCESSOR_POWER5
1699 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1700
ec507f2d
DE
1701 rs6000_sched_restricted_insns_priority
1702 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1703
569fa502 1704 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1705 rs6000_sched_costly_dep
1706 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1707
569fa502
DN
1708 if (rs6000_sched_costly_dep_str)
1709 {
f676971a 1710 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1711 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1712 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1713 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1714 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1715 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1716 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1717 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1718 else
c4ad648e 1719 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1720 }
1721
1722 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1723 rs6000_sched_insert_nops
1724 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1725
cbe26ab8
DN
1726 if (rs6000_sched_insert_nops_str)
1727 {
1728 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1729 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1730 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1731 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1732 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1733 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1734 else
c4ad648e 1735 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1736 }
1737
c81bebd7 1738#ifdef TARGET_REGNAMES
a4f6c312
SS
1739 /* If the user desires alternate register names, copy in the
1740 alternate names now. */
c81bebd7 1741 if (TARGET_REGNAMES)
4e135bdd 1742 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1743#endif
1744
df01da37 1745 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1746 If -maix-struct-return or -msvr4-struct-return was explicitly
1747 used, don't override with the ABI default. */
df01da37
DE
1748 if (!rs6000_explicit_options.aix_struct_ret)
1749 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1750
602ea4d3 1751 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1752 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1753
f676971a 1754 if (TARGET_TOC)
9ebbca7d 1755 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1756
301d03af
RS
1757 /* We can only guarantee the availability of DI pseudo-ops when
1758 assembling for 64-bit targets. */
ae6c1efd 1759 if (!TARGET_64BIT)
301d03af
RS
1760 {
1761 targetm.asm_out.aligned_op.di = NULL;
1762 targetm.asm_out.unaligned_op.di = NULL;
1763 }
1764
1494c534
DE
1765 /* Set branch target alignment, if not optimizing for size. */
1766 if (!optimize_size)
1767 {
d296e02e
AP
1768 /* Cell wants 8-byte alignment for dual issue. */
1769 if (rs6000_cpu == PROCESSOR_CELL)
1770 {
1771 if (align_functions <= 0)
1772 align_functions = 8;
1773 if (align_jumps <= 0)
1774 align_jumps = 8;
1775 if (align_loops <= 0)
1776 align_loops = 8;
1777 }
44cd321e 1778 if (rs6000_align_branch_targets)
1494c534
DE
1779 {
1780 if (align_functions <= 0)
1781 align_functions = 16;
1782 if (align_jumps <= 0)
1783 align_jumps = 16;
1784 if (align_loops <= 0)
1785 align_loops = 16;
1786 }
1787 if (align_jumps_max_skip <= 0)
1788 align_jumps_max_skip = 15;
1789 if (align_loops_max_skip <= 0)
1790 align_loops_max_skip = 15;
1791 }
2792d578 1792
71f123ca
FS
1793 /* Arrange to save and restore machine status around nested functions. */
1794 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1795
1796 /* We should always be splitting complex arguments, but we can't break
1797 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1798 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1799 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1800
1801 /* Initialize rs6000_cost with the appropriate target costs. */
1802 if (optimize_size)
1803 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1804 else
1805 switch (rs6000_cpu)
1806 {
1807 case PROCESSOR_RIOS1:
1808 rs6000_cost = &rios1_cost;
1809 break;
1810
1811 case PROCESSOR_RIOS2:
1812 rs6000_cost = &rios2_cost;
1813 break;
1814
1815 case PROCESSOR_RS64A:
1816 rs6000_cost = &rs64a_cost;
1817 break;
1818
1819 case PROCESSOR_MPCCORE:
1820 rs6000_cost = &mpccore_cost;
1821 break;
1822
1823 case PROCESSOR_PPC403:
1824 rs6000_cost = &ppc403_cost;
1825 break;
1826
1827 case PROCESSOR_PPC405:
1828 rs6000_cost = &ppc405_cost;
1829 break;
1830
1831 case PROCESSOR_PPC440:
1832 rs6000_cost = &ppc440_cost;
1833 break;
1834
1835 case PROCESSOR_PPC601:
1836 rs6000_cost = &ppc601_cost;
1837 break;
1838
1839 case PROCESSOR_PPC603:
1840 rs6000_cost = &ppc603_cost;
1841 break;
1842
1843 case PROCESSOR_PPC604:
1844 rs6000_cost = &ppc604_cost;
1845 break;
1846
1847 case PROCESSOR_PPC604e:
1848 rs6000_cost = &ppc604e_cost;
1849 break;
1850
1851 case PROCESSOR_PPC620:
8b897cfa
RS
1852 rs6000_cost = &ppc620_cost;
1853 break;
1854
f0517163
RS
1855 case PROCESSOR_PPC630:
1856 rs6000_cost = &ppc630_cost;
1857 break;
1858
982afe02 1859 case PROCESSOR_CELL:
d296e02e
AP
1860 rs6000_cost = &ppccell_cost;
1861 break;
1862
8b897cfa
RS
1863 case PROCESSOR_PPC750:
1864 case PROCESSOR_PPC7400:
1865 rs6000_cost = &ppc750_cost;
1866 break;
1867
1868 case PROCESSOR_PPC7450:
1869 rs6000_cost = &ppc7450_cost;
1870 break;
1871
1872 case PROCESSOR_PPC8540:
1873 rs6000_cost = &ppc8540_cost;
1874 break;
1875
fa41c305
EW
1876 case PROCESSOR_PPCE300C2:
1877 case PROCESSOR_PPCE300C3:
1878 rs6000_cost = &ppce300c2c3_cost;
1879 break;
1880
8b897cfa
RS
1881 case PROCESSOR_POWER4:
1882 case PROCESSOR_POWER5:
1883 rs6000_cost = &power4_cost;
1884 break;
1885
44cd321e
PS
1886 case PROCESSOR_POWER6:
1887 rs6000_cost = &power6_cost;
1888 break;
1889
8b897cfa 1890 default:
37409796 1891 gcc_unreachable ();
8b897cfa 1892 }
0b11da67
DE
1893
1894 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1895 set_param_value ("simultaneous-prefetches",
1896 rs6000_cost->simultaneous_prefetches);
1897 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1898 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1899 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1900 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1901 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1902 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
d7bd8aeb
JJ
1903
1904 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
1905 can be optimized to ap = __builtin_next_arg (0). */
1906 if (DEFAULT_ABI != ABI_V4)
1907 targetm.expand_builtin_va_start = NULL;
5248c961 1908}
5accd822 1909
7ccf35ed
DN
1910/* Implement targetm.vectorize.builtin_mask_for_load. */
1911static tree
1912rs6000_builtin_mask_for_load (void)
1913{
1914 if (TARGET_ALTIVEC)
1915 return altivec_builtin_mask_for_load;
1916 else
1917 return 0;
1918}
1919
f57d17f1
TM
1920/* Implement targetm.vectorize.builtin_conversion. */
1921static tree
1922rs6000_builtin_conversion (enum tree_code code, tree type)
1923{
1924 if (!TARGET_ALTIVEC)
1925 return NULL_TREE;
982afe02 1926
f57d17f1
TM
1927 switch (code)
1928 {
1929 case FLOAT_EXPR:
1930 switch (TYPE_MODE (type))
1931 {
1932 case V4SImode:
982afe02 1933 return TYPE_UNSIGNED (type) ?
f57d17f1
TM
1934 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX] :
1935 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
1936 default:
1937 return NULL_TREE;
1938 }
1939 default:
1940 return NULL_TREE;
1941 }
1942}
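/* Example (illustrative): when the vectorizer asks for a FLOAT_EXPR
   conversion of a V4SImode operand, the hook above hands back the vcfux
   builtin for unsigned types and vcfsx for signed types; every other
   code/mode combination is declined with NULL_TREE.  */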
1943
89d67cca
DN
1944/* Implement targetm.vectorize.builtin_mul_widen_even. */
1945static tree
1946rs6000_builtin_mul_widen_even (tree type)
1947{
1948 if (!TARGET_ALTIVEC)
1949 return NULL_TREE;
1950
1951 switch (TYPE_MODE (type))
1952 {
1953 case V8HImode:
982afe02 1954 return TYPE_UNSIGNED (type) ?
89d67cca
DN
1955 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH] :
1956 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
1957
1958 case V16QImode:
1959 return TYPE_UNSIGNED (type) ?
1960 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB] :
1961 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
1962 default:
1963 return NULL_TREE;
1964 }
1965}
1966
1967/* Implement targetm.vectorize.builtin_mul_widen_odd. */
1968static tree
1969rs6000_builtin_mul_widen_odd (tree type)
1970{
1971 if (!TARGET_ALTIVEC)
1972 return NULL_TREE;
1973
1974 switch (TYPE_MODE (type))
1975 {
1976 case V8HImode:
1977 return TYPE_UNSIGNED (type) ?
1978 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH] :
1979 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
1980
1981 case V16QImode:
1982 return TYPE_UNSIGNED (type) ?
1983 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB] :
1984 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
1985 default:
1986 return NULL_TREE;
1987 }
1988}
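/* Note (illustrative): taken together, the two hooks above let the
   vectorizer build a widening multiply, e.g. V8HImode x V8HImode ->
   V4SImode, from vmulesh/vmuleuh (products of the even-numbered
   elements) and vmulosh/vmulouh (products of the odd-numbered ones),
   with the interleaving back into element order handled by generic
   code.  */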
1989
5b900a4c
DN
1990
1991/* Return true iff a data reference of TYPE can reach vector alignment (16)
 1992 after applying N iterations. This routine does not determine
 1993 how many iterations are required to reach the desired alignment. */
1994
1995static bool
3101faab 1996rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
1997{
1998 if (is_packed)
1999 return false;
2000
2001 if (TARGET_32BIT)
2002 {
2003 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
2004 return true;
2005
2006 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
2007 return true;
2008
2009 return false;
2010 }
2011 else
2012 {
2013 if (TARGET_MACHO)
2014 return false;
2015
2016 /* Assume that all other types are naturally aligned. CHECKME! */
2017 return true;
2018 }
2019}
2020
5da702b1
AH
2021/* Handle generic options of the form -mfoo=yes/no.
2022 NAME is the option name.
2023 VALUE is the option value.
2024 FLAG is a pointer to the flag in which to store 1 or 0, depending on
 2025 whether the option value is 'yes' or 'no' respectively. */
993f19a8 2026static void
5da702b1 2027rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 2028{
5da702b1 2029 if (value == 0)
993f19a8 2030 return;
5da702b1
AH
2031 else if (!strcmp (value, "yes"))
2032 *flag = 1;
2033 else if (!strcmp (value, "no"))
2034 *flag = 0;
08b57fb3 2035 else
5da702b1 2036 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
2037}
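/* Usage example (taken from the option handling further down in this file):

     rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));

   i.e. -mvrsave=yes stores 1 and -mvrsave=no stores 0 into the flag,
   while any other value is reported as an unknown -mvrsave= option.  */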
2038
c4501e62
JJ
2039/* Validate and record the size specified with the -mtls-size option. */
2040
2041static void
863d938c 2042rs6000_parse_tls_size_option (void)
c4501e62
JJ
2043{
2044 if (rs6000_tls_size_string == 0)
2045 return;
2046 else if (strcmp (rs6000_tls_size_string, "16") == 0)
2047 rs6000_tls_size = 16;
2048 else if (strcmp (rs6000_tls_size_string, "32") == 0)
2049 rs6000_tls_size = 32;
2050 else if (strcmp (rs6000_tls_size_string, "64") == 0)
2051 rs6000_tls_size = 64;
2052 else
9e637a26 2053 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
2054}
2055
5accd822 2056void
a2369ed3 2057optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 2058{
2e3f0db6
DJ
2059 if (DEFAULT_ABI == ABI_DARWIN)
2060 /* The Darwin libraries never set errno, so we might as well
2061 avoid calling them when that's the only reason we would. */
2062 flag_errno_math = 0;
59d6560b
DE
2063
2064 /* Double growth factor to counter reduced min jump length. */
2065 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
2066
2067 /* Enable section anchors by default.
2068 Skip section anchors for Objective C and Objective C++
2069 until the front ends are fixed. */
23f99493 2070 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
194c524a 2071 flag_section_anchors = 1;
5accd822 2072}
78f5898b
AH
2073
2074/* Implement TARGET_HANDLE_OPTION. */
2075
2076static bool
2077rs6000_handle_option (size_t code, const char *arg, int value)
2078{
2079 switch (code)
2080 {
2081 case OPT_mno_power:
2082 target_flags &= ~(MASK_POWER | MASK_POWER2
2083 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2084 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2085 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2086 break;
2087 case OPT_mno_powerpc:
2088 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2089 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2090 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2091 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2092 break;
2093 case OPT_mfull_toc:
d2894ab5
DE
2094 target_flags &= ~MASK_MINIMAL_TOC;
2095 TARGET_NO_FP_IN_TOC = 0;
2096 TARGET_NO_SUM_IN_TOC = 0;
2097 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2098#ifdef TARGET_USES_SYSV4_OPT
2099 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
2100 just the same as -mminimal-toc. */
2101 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2102 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2103#endif
2104 break;
2105
2106#ifdef TARGET_USES_SYSV4_OPT
2107 case OPT_mtoc:
2108 /* Make -mtoc behave like -mminimal-toc. */
2109 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2110 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2111 break;
2112#endif
2113
2114#ifdef TARGET_USES_AIX64_OPT
2115 case OPT_maix64:
2116#else
2117 case OPT_m64:
2118#endif
2c9c9afd
AM
2119 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2120 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2121 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2122 break;
2123
2124#ifdef TARGET_USES_AIX64_OPT
2125 case OPT_maix32:
2126#else
2127 case OPT_m32:
2128#endif
2129 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2130 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2131 break;
2132
2133 case OPT_minsert_sched_nops_:
2134 rs6000_sched_insert_nops_str = arg;
2135 break;
2136
2137 case OPT_mminimal_toc:
2138 if (value == 1)
2139 {
d2894ab5
DE
2140 TARGET_NO_FP_IN_TOC = 0;
2141 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2142 }
2143 break;
2144
2145 case OPT_mpower:
2146 if (value == 1)
c2dba4ab
AH
2147 {
2148 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2149 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2150 }
78f5898b
AH
2151 break;
2152
2153 case OPT_mpower2:
2154 if (value == 1)
c2dba4ab
AH
2155 {
2156 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2157 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2158 }
78f5898b
AH
2159 break;
2160
2161 case OPT_mpowerpc_gpopt:
2162 case OPT_mpowerpc_gfxopt:
2163 if (value == 1)
c2dba4ab
AH
2164 {
2165 target_flags |= MASK_POWERPC;
2166 target_flags_explicit |= MASK_POWERPC;
2167 }
78f5898b
AH
2168 break;
2169
df01da37
DE
2170 case OPT_maix_struct_return:
2171 case OPT_msvr4_struct_return:
2172 rs6000_explicit_options.aix_struct_ret = true;
2173 break;
2174
78f5898b 2175 case OPT_mvrsave_:
a2db2771 2176 rs6000_explicit_options.vrsave = true;
78f5898b
AH
2177 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2178 break;
78f5898b
AH
2179
2180 case OPT_misel_:
2181 rs6000_explicit_options.isel = true;
2182 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2183 break;
2184
2185 case OPT_mspe_:
2186 rs6000_explicit_options.spe = true;
2187 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2188 break;
2189
2190 case OPT_mdebug_:
2191 rs6000_debug_name = arg;
2192 break;
2193
2194#ifdef TARGET_USES_SYSV4_OPT
2195 case OPT_mcall_:
2196 rs6000_abi_name = arg;
2197 break;
2198
2199 case OPT_msdata_:
2200 rs6000_sdata_name = arg;
2201 break;
2202
2203 case OPT_mtls_size_:
2204 rs6000_tls_size_string = arg;
2205 break;
2206
2207 case OPT_mrelocatable:
2208 if (value == 1)
c2dba4ab 2209 {
e0bf274f
AM
2210 target_flags |= MASK_MINIMAL_TOC;
2211 target_flags_explicit |= MASK_MINIMAL_TOC;
2212 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2213 }
78f5898b
AH
2214 break;
2215
2216 case OPT_mrelocatable_lib:
2217 if (value == 1)
c2dba4ab 2218 {
e0bf274f
AM
2219 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2220 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2221 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2222 }
78f5898b 2223 else
c2dba4ab
AH
2224 {
2225 target_flags &= ~MASK_RELOCATABLE;
2226 target_flags_explicit |= MASK_RELOCATABLE;
2227 }
78f5898b
AH
2228 break;
2229#endif
2230
2231 case OPT_mabi_:
78f5898b
AH
2232 if (!strcmp (arg, "altivec"))
2233 {
a2db2771 2234 rs6000_explicit_options.altivec_abi = true;
78f5898b 2235 rs6000_altivec_abi = 1;
a2db2771
JJ
2236
2237 /* Enabling the AltiVec ABI turns off the SPE ABI. */
78f5898b
AH
2238 rs6000_spe_abi = 0;
2239 }
2240 else if (! strcmp (arg, "no-altivec"))
d3603e8c 2241 {
a2db2771 2242 rs6000_explicit_options.altivec_abi = true;
d3603e8c
AM
2243 rs6000_altivec_abi = 0;
2244 }
78f5898b
AH
2245 else if (! strcmp (arg, "spe"))
2246 {
a2db2771 2247 rs6000_explicit_options.spe_abi = true;
78f5898b
AH
2248 rs6000_spe_abi = 1;
2249 rs6000_altivec_abi = 0;
2250 if (!TARGET_SPE_ABI)
2251 error ("not configured for ABI: '%s'", arg);
2252 }
2253 else if (! strcmp (arg, "no-spe"))
d3603e8c 2254 {
a2db2771 2255 rs6000_explicit_options.spe_abi = true;
d3603e8c
AM
2256 rs6000_spe_abi = 0;
2257 }
78f5898b
AH
2258
2259 /* These are here for testing during development only; please do not
 2260 document them in the manual. */
2261 else if (! strcmp (arg, "d64"))
2262 {
2263 rs6000_darwin64_abi = 1;
2264 warning (0, "Using darwin64 ABI");
2265 }
2266 else if (! strcmp (arg, "d32"))
2267 {
2268 rs6000_darwin64_abi = 0;
2269 warning (0, "Using old darwin ABI");
2270 }
2271
602ea4d3
JJ
2272 else if (! strcmp (arg, "ibmlongdouble"))
2273 {
d3603e8c 2274 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2275 rs6000_ieeequad = 0;
2276 warning (0, "Using IBM extended precision long double");
2277 }
2278 else if (! strcmp (arg, "ieeelongdouble"))
2279 {
d3603e8c 2280 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2281 rs6000_ieeequad = 1;
2282 warning (0, "Using IEEE extended precision long double");
2283 }
2284
78f5898b
AH
2285 else
2286 {
2287 error ("unknown ABI specified: '%s'", arg);
2288 return false;
2289 }
2290 break;
2291
2292 case OPT_mcpu_:
2293 rs6000_select[1].string = arg;
2294 break;
2295
2296 case OPT_mtune_:
2297 rs6000_select[2].string = arg;
2298 break;
2299
2300 case OPT_mtraceback_:
2301 rs6000_traceback_name = arg;
2302 break;
2303
2304 case OPT_mfloat_gprs_:
2305 rs6000_explicit_options.float_gprs = true;
2306 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2307 rs6000_float_gprs = 1;
2308 else if (! strcmp (arg, "double"))
2309 rs6000_float_gprs = 2;
2310 else if (! strcmp (arg, "no"))
2311 rs6000_float_gprs = 0;
2312 else
2313 {
2314 error ("invalid option for -mfloat-gprs: '%s'", arg);
2315 return false;
2316 }
2317 break;
2318
2319 case OPT_mlong_double_:
2320 rs6000_explicit_options.long_double = true;
2321 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2322 if (value != 64 && value != 128)
2323 {
2324 error ("Unknown switch -mlong-double-%s", arg);
2325 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2326 return false;
2327 }
2328 else
2329 rs6000_long_double_type_size = value;
2330 break;
2331
2332 case OPT_msched_costly_dep_:
2333 rs6000_sched_costly_dep_str = arg;
2334 break;
2335
2336 case OPT_malign_:
2337 rs6000_explicit_options.alignment = true;
2338 if (! strcmp (arg, "power"))
2339 {
2340 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2341 some C library functions, so warn about it. The flag may be
2342 useful for performance studies from time to time though, so
2343 don't disable it entirely. */
2344 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2345 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2346 " it is incompatible with the installed C and C++ libraries");
2347 rs6000_alignment_flags = MASK_ALIGN_POWER;
2348 }
2349 else if (! strcmp (arg, "natural"))
2350 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2351 else
2352 {
2353 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2354 return false;
2355 }
2356 break;
2357 }
2358 return true;
2359}
3cfa4909
MM
2360\f
2361/* Do anything needed at the start of the asm file. */
2362
1bc7c5b6 2363static void
863d938c 2364rs6000_file_start (void)
3cfa4909 2365{
c4d38ccb 2366 size_t i;
3cfa4909 2367 char buffer[80];
d330fd93 2368 const char *start = buffer;
3cfa4909 2369 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2370 const char *default_cpu = TARGET_CPU_DEFAULT;
2371 FILE *file = asm_out_file;
2372
2373 default_file_start ();
2374
2375#ifdef TARGET_BI_ARCH
2376 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2377 default_cpu = 0;
2378#endif
3cfa4909
MM
2379
2380 if (flag_verbose_asm)
2381 {
2382 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2383 rs6000_select[0].string = default_cpu;
2384
b6a1cbae 2385 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2386 {
2387 ptr = &rs6000_select[i];
2388 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2389 {
2390 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2391 start = "";
2392 }
2393 }
2394
9c6b4ed9 2395 if (PPC405_ERRATUM77)
b0bfee6e 2396 {
9c6b4ed9 2397 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2398 start = "";
2399 }
b0bfee6e 2400
b91da81f 2401#ifdef USING_ELFOS_H
3cfa4909
MM
2402 switch (rs6000_sdata)
2403 {
2404 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2405 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2406 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2407 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2408 }
2409
2410 if (rs6000_sdata && g_switch_value)
2411 {
307b599c
MK
2412 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2413 g_switch_value);
3cfa4909
MM
2414 start = "";
2415 }
2416#endif
2417
2418 if (*start == '\0')
949ea356 2419 putc ('\n', file);
3cfa4909 2420 }
b723e82f 2421
e51917ae
JM
2422#ifdef HAVE_AS_GNU_ATTRIBUTE
2423 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2424 {
2425 fprintf (file, "\t.gnu_attribute 4, %d\n",
2426 (TARGET_HARD_FLOAT && TARGET_FPRS) ? 1 : 2);
2427 fprintf (file, "\t.gnu_attribute 8, %d\n",
2428 (TARGET_ALTIVEC_ABI ? 2
2429 : TARGET_SPE_ABI ? 3
2430 : 1));
2431 }
e51917ae
JM
2432#endif
2433
b723e82f
JJ
2434 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2435 {
d6b5193b
RS
2436 switch_to_section (toc_section);
2437 switch_to_section (text_section);
b723e82f 2438 }
3cfa4909 2439}
c4e18b1c 2440
5248c961 2441\f
a0ab749a 2442/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2443
2444int
863d938c 2445direct_return (void)
9878760c 2446{
4697a36c
MM
2447 if (reload_completed)
2448 {
2449 rs6000_stack_t *info = rs6000_stack_info ();
2450
2451 if (info->first_gp_reg_save == 32
2452 && info->first_fp_reg_save == 64
00b960c7 2453 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2454 && ! info->lr_save_p
2455 && ! info->cr_save_p
00b960c7 2456 && info->vrsave_mask == 0
c81fc13e 2457 && ! info->push_p)
4697a36c
MM
2458 return 1;
2459 }
2460
2461 return 0;
9878760c
RK
2462}
2463
4e74d8ec
MM
2464/* Return the number of instructions it takes to form a constant in an
2465 integer register. */
2466
48d72335 2467int
a2369ed3 2468num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2469{
2470 /* signed constant loadable with {cal|addi} */
547b216d 2471 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2472 return 1;
2473
4e74d8ec 2474 /* constant loadable with {cau|addis} */
547b216d
DE
2475 else if ((value & 0xffff) == 0
2476 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2477 return 1;
2478
5f59ecb7 2479#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2480 else if (TARGET_POWERPC64)
4e74d8ec 2481 {
a65c591c
DE
2482 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2483 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2484
a65c591c 2485 if (high == 0 || high == -1)
4e74d8ec
MM
2486 return 2;
2487
a65c591c 2488 high >>= 1;
4e74d8ec 2489
a65c591c 2490 if (low == 0)
4e74d8ec 2491 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2492 else
2493 return (num_insns_constant_wide (high)
e396202a 2494 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2495 }
2496#endif
2497
2498 else
2499 return 2;
2500}
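/* Worked examples for the function above (illustrative only):
     5           -> 1 insn  (fits the signed 16-bit addi range)
     0x12340000  -> 1 insn  (low 16 bits zero, a single addis suffices)
     0x12345678  -> 2 insns (an addis + addi/ori style sequence)
   and on 64-bit targets a constant such as 0x100000000 also counts as 2,
   via the high/low decomposition in the TARGET_POWERPC64 branch.  */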
2501
2502int
a2369ed3 2503num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2504{
37409796 2505 HOST_WIDE_INT low, high;
bb8df8a6 2506
37409796 2507 switch (GET_CODE (op))
0d30d435 2508 {
37409796 2509 case CONST_INT:
0d30d435 2510#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2511 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2512 && mask64_operand (op, mode))
c4ad648e 2513 return 2;
0d30d435
DE
2514 else
2515#endif
2516 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2517
37409796 2518 case CONST_DOUBLE:
e41b2a33 2519 if (mode == SFmode || mode == SDmode)
37409796
NS
2520 {
2521 long l;
2522 REAL_VALUE_TYPE rv;
bb8df8a6 2523
37409796 2524 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
e41b2a33
PB
2525 if (DECIMAL_FLOAT_MODE_P (mode))
2526 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
2527 else
2528 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
37409796
NS
2529 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2530 }
a260abc9 2531
37409796
NS
2532 if (mode == VOIDmode || mode == DImode)
2533 {
2534 high = CONST_DOUBLE_HIGH (op);
2535 low = CONST_DOUBLE_LOW (op);
2536 }
2537 else
2538 {
2539 long l[2];
2540 REAL_VALUE_TYPE rv;
bb8df8a6 2541
37409796 2542 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2543 if (DECIMAL_FLOAT_MODE_P (mode))
2544 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2545 else
2546 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2547 high = l[WORDS_BIG_ENDIAN == 0];
2548 low = l[WORDS_BIG_ENDIAN != 0];
2549 }
47ad8c61 2550
37409796
NS
2551 if (TARGET_32BIT)
2552 return (num_insns_constant_wide (low)
2553 + num_insns_constant_wide (high));
2554 else
2555 {
2556 if ((high == 0 && low >= 0)
2557 || (high == -1 && low < 0))
2558 return num_insns_constant_wide (low);
bb8df8a6 2559
1990cd79 2560 else if (mask64_operand (op, mode))
37409796 2561 return 2;
bb8df8a6 2562
37409796
NS
2563 else if (low == 0)
2564 return num_insns_constant_wide (high) + 1;
bb8df8a6 2565
37409796
NS
2566 else
2567 return (num_insns_constant_wide (high)
2568 + num_insns_constant_wide (low) + 1);
2569 }
bb8df8a6 2570
37409796
NS
2571 default:
2572 gcc_unreachable ();
4e74d8ec 2573 }
4e74d8ec
MM
2574}
2575
0972012c
RS
2576/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2577 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2578 corresponding element of the vector, but for V4SFmode and V2SFmode,
2579 the corresponding "float" is interpreted as an SImode integer. */
2580
2581static HOST_WIDE_INT
2582const_vector_elt_as_int (rtx op, unsigned int elt)
2583{
2584 rtx tmp = CONST_VECTOR_ELT (op, elt);
2585 if (GET_MODE (op) == V4SFmode
2586 || GET_MODE (op) == V2SFmode)
2587 tmp = gen_lowpart (SImode, tmp);
2588 return INTVAL (tmp);
2589}
452a7d36 2590
77ccdfed 2591/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2592 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2593 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2594 all items are set to the same value and contain COPIES replicas of the
2595 vsplt's operand; if STEP > 1, one in every STEP elements is set to the vsplt's
2596 operand and the others are set to the value of the operand's msb. */
2597
2598static bool
2599vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2600{
66180ff3
PB
2601 enum machine_mode mode = GET_MODE (op);
2602 enum machine_mode inner = GET_MODE_INNER (mode);
2603
2604 unsigned i;
2605 unsigned nunits = GET_MODE_NUNITS (mode);
2606 unsigned bitsize = GET_MODE_BITSIZE (inner);
2607 unsigned mask = GET_MODE_MASK (inner);
2608
0972012c 2609 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2610 HOST_WIDE_INT splat_val = val;
2611 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2612
2613 /* Construct the value to be splatted, if possible. If not, return 0. */
2614 for (i = 2; i <= copies; i *= 2)
452a7d36 2615 {
66180ff3
PB
2616 HOST_WIDE_INT small_val;
2617 bitsize /= 2;
2618 small_val = splat_val >> bitsize;
2619 mask >>= bitsize;
2620 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2621 return false;
2622 splat_val = small_val;
2623 }
c4ad648e 2624
66180ff3
PB
2625 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2626 if (EASY_VECTOR_15 (splat_val))
2627 ;
2628
2629 /* Also check if we can splat, and then add the result to itself. Do so if
2630 the value is positive, or if the splat instruction is using OP's mode;
2631 for splat_val < 0, the splat and the add should use the same mode. */
2632 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2633 && (splat_val >= 0 || (step == 1 && copies == 1)))
2634 ;
2635
2636 else
2637 return false;
2638
2639 /* Check if VAL is present in every STEP-th element, and the
2640 other elements are filled with its most significant bit. */
2641 for (i = 0; i < nunits - 1; ++i)
2642 {
2643 HOST_WIDE_INT desired_val;
2644 if (((i + 1) & (step - 1)) == 0)
2645 desired_val = val;
2646 else
2647 desired_val = msb_val;
2648
0972012c 2649 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2650 return false;
452a7d36 2651 }
66180ff3
PB
2652
2653 return true;
452a7d36
HP
2654}
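/* Examples for the function above (illustrative only): a V4SImode vector
   of four 5s matches with STEP == 1, COPIES == 1 (a plain vspltisw 5),
   while a V4SImode vector whose every word is 0x00010001 matches with
   STEP == 1, COPIES == 2 -- it is really a vspltish 1 viewed in
   V4SImode.  easy_altivec_constant below tries these STEP/COPIES
   combinations in turn.  */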
2655
69ef87e2 2656
77ccdfed 2657/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2658 with a vspltisb, vspltish or vspltisw. */
2659
2660bool
2661easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2662{
66180ff3 2663 unsigned step, copies;
d744e06e 2664
66180ff3
PB
2665 if (mode == VOIDmode)
2666 mode = GET_MODE (op);
2667 else if (mode != GET_MODE (op))
2668 return false;
d744e06e 2669
66180ff3
PB
2670 /* Start with a vspltisw. */
2671 step = GET_MODE_NUNITS (mode) / 4;
2672 copies = 1;
2673
2674 if (vspltis_constant (op, step, copies))
2675 return true;
2676
2677 /* Then try with a vspltish. */
2678 if (step == 1)
2679 copies <<= 1;
2680 else
2681 step >>= 1;
2682
2683 if (vspltis_constant (op, step, copies))
2684 return true;
2685
2686 /* And finally a vspltisb. */
2687 if (step == 1)
2688 copies <<= 1;
2689 else
2690 step >>= 1;
2691
2692 if (vspltis_constant (op, step, copies))
2693 return true;
2694
2695 return false;
d744e06e
AH
2696}
2697
66180ff3
PB
2698/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2699 result is OP. Abort if it is not possible. */
d744e06e 2700
f676971a 2701rtx
66180ff3 2702gen_easy_altivec_constant (rtx op)
452a7d36 2703{
66180ff3
PB
2704 enum machine_mode mode = GET_MODE (op);
2705 int nunits = GET_MODE_NUNITS (mode);
2706 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2707 unsigned step = nunits / 4;
2708 unsigned copies = 1;
2709
2710 /* Start with a vspltisw. */
2711 if (vspltis_constant (op, step, copies))
2712 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2713
2714 /* Then try with a vspltish. */
2715 if (step == 1)
2716 copies <<= 1;
2717 else
2718 step >>= 1;
2719
2720 if (vspltis_constant (op, step, copies))
2721 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2722
2723 /* And finally a vspltisb. */
2724 if (step == 1)
2725 copies <<= 1;
2726 else
2727 step >>= 1;
2728
2729 if (vspltis_constant (op, step, copies))
2730 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2731
2732 gcc_unreachable ();
d744e06e
AH
2733}
2734
2735const char *
a2369ed3 2736output_vec_const_move (rtx *operands)
d744e06e
AH
2737{
2738 int cst, cst2;
2739 enum machine_mode mode;
2740 rtx dest, vec;
2741
2742 dest = operands[0];
2743 vec = operands[1];
d744e06e 2744 mode = GET_MODE (dest);
69ef87e2 2745
d744e06e
AH
2746 if (TARGET_ALTIVEC)
2747 {
66180ff3 2748 rtx splat_vec;
d744e06e
AH
2749 if (zero_constant (vec, mode))
2750 return "vxor %0,%0,%0";
37409796 2751
66180ff3
PB
2752 splat_vec = gen_easy_altivec_constant (vec);
2753 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2754 operands[1] = XEXP (splat_vec, 0);
2755 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2756 return "#";
bb8df8a6 2757
66180ff3 2758 switch (GET_MODE (splat_vec))
98ef3137 2759 {
37409796 2760 case V4SImode:
66180ff3 2761 return "vspltisw %0,%1";
c4ad648e 2762
37409796 2763 case V8HImode:
66180ff3 2764 return "vspltish %0,%1";
c4ad648e 2765
37409796 2766 case V16QImode:
66180ff3 2767 return "vspltisb %0,%1";
bb8df8a6 2768
37409796
NS
2769 default:
2770 gcc_unreachable ();
98ef3137 2771 }
69ef87e2
AH
2772 }
2773
37409796 2774 gcc_assert (TARGET_SPE);
bb8df8a6 2775
37409796
NS
2776 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2777 pattern of V1DI, V4HI, and V2SF.
2778
2779 FIXME: We should probably return # and add post reload
2780 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
2781 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2782 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2783 operands[1] = CONST_VECTOR_ELT (vec, 0);
2784 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2785 if (cst == cst2)
2786 return "li %0,%1\n\tevmergelo %0,%0,%0";
2787 else
2788 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2789}
2790
f5027409
RE
2791/* Initialize the paired-single vector TARGET to VALS. */
2792
2793void
2794paired_expand_vector_init (rtx target, rtx vals)
2795{
2796 enum machine_mode mode = GET_MODE (target);
2797 int n_elts = GET_MODE_NUNITS (mode);
2798 int n_var = 0;
2799 rtx x, new, tmp, constant_op, op1, op2;
2800 int i;
2801
2802 for (i = 0; i < n_elts; ++i)
2803 {
2804 x = XVECEXP (vals, 0, i);
2805 if (!CONSTANT_P (x))
2806 ++n_var;
2807 }
2808 if (n_var == 0)
2809 {
2810 /* Load from constant pool. */
2811 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2812 return;
2813 }
2814
2815 if (n_var == 2)
2816 {
2817 /* The vector is initialized only with non-constants. */
2818 new = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
2819 XVECEXP (vals, 0, 1));
2820
2821 emit_move_insn (target, new);
2822 return;
2823 }
2824
2825 /* One field is non-constant and the other one is a constant. Load the
2826 constant from the constant pool and use the ps_merge instruction to
2827 construct the whole vector. */
2828 op1 = XVECEXP (vals, 0, 0);
2829 op2 = XVECEXP (vals, 0, 1);
2830
2831 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
2832
2833 tmp = gen_reg_rtx (GET_MODE (constant_op));
2834 emit_move_insn (tmp, constant_op);
2835
2836 if (CONSTANT_P (op1))
2837 new = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
2838 else
2839 new = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
2840
2841 emit_move_insn (target, new);
2842}
2843
e2e95f45
RE
2844void
2845paired_expand_vector_move (rtx operands[])
2846{
2847 rtx op0 = operands[0], op1 = operands[1];
2848
2849 emit_move_insn (op0, op1);
2850}
2851
2852/* Emit vector compare for code RCODE. DEST is destination, OP1 and
2853 OP2 are two VEC_COND_EXPR operands, CC_OP0 and CC_OP1 are the two
2854 operands of the comparison RCODE. This is a recursive
2855 function. */
2856
2857static void
2858paired_emit_vector_compare (enum rtx_code rcode,
2859 rtx dest, rtx op0, rtx op1,
2860 rtx cc_op0, rtx cc_op1)
2861{
2862 rtx tmp = gen_reg_rtx (V2SFmode);
2863 rtx tmp1, max, min, equal_zero;
2864
2865 gcc_assert (TARGET_PAIRED_FLOAT);
2866 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
2867
2868 switch (rcode)
2869 {
2870 case LT:
2871 case LTU:
2872 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2873 return;
2874 case GE:
2875 case GEU:
2876 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2877 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
2878 return;
2879 case LE:
2880 case LEU:
2881 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
2882 return;
2883 case GT:
2884 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2885 return;
2886 case EQ:
2887 tmp1 = gen_reg_rtx (V2SFmode);
2888 max = gen_reg_rtx (V2SFmode);
2889 min = gen_reg_rtx (V2SFmode);
2890 equal_zero = gen_reg_rtx (V2SFmode);
2891
2892 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2893 emit_insn (gen_selv2sf4
2894 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2895 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
2896 emit_insn (gen_selv2sf4
2897 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2898 emit_insn (gen_subv2sf3 (tmp1, min, max));
2899 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
2900 return;
2901 case NE:
2902 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
2903 return;
2904 case UNLE:
2905 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2906 return;
2907 case UNLT:
2908 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
2909 return;
2910 case UNGE:
2911 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2912 return;
2913 case UNGT:
2914 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
2915 return;
2916 default:
2917 gcc_unreachable ();
2918 }
2919
2920 return;
2921}
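/* Sketch of the lowering above (illustrative): the GE case computes
   tmp = cc_op0 - cc_op1 with gen_subv2sf3 and then uses gen_selv2sf4 to
   select OP0 where tmp >= 0 and OP1 otherwise; LT simply swaps OP0/OP1
   and recurses as GE, and EQ computes both a max and a min of the
   compare operands and selects on min - max, which is nonnegative only
   when the elements are equal.  */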
2922
2923/* Emit vector conditional expression.
2924 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
2925 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
2926
2927int
2928paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
2929 rtx cond, rtx cc_op0, rtx cc_op1)
2930{
2931 enum rtx_code rcode = GET_CODE (cond);
2932
2933 if (!TARGET_PAIRED_FLOAT)
2934 return 0;
2935
2936 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
2937
2938 return 1;
2939}
2940
7a4eca66
DE
2941/* Initialize vector TARGET to VALS. */
2942
2943void
2944rs6000_expand_vector_init (rtx target, rtx vals)
2945{
2946 enum machine_mode mode = GET_MODE (target);
2947 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2948 int n_elts = GET_MODE_NUNITS (mode);
2949 int n_var = 0, one_var = -1;
2950 bool all_same = true, all_const_zero = true;
2951 rtx x, mem;
2952 int i;
2953
2954 for (i = 0; i < n_elts; ++i)
2955 {
2956 x = XVECEXP (vals, 0, i);
2957 if (!CONSTANT_P (x))
2958 ++n_var, one_var = i;
2959 else if (x != CONST0_RTX (inner_mode))
2960 all_const_zero = false;
2961
2962 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2963 all_same = false;
2964 }
2965
2966 if (n_var == 0)
2967 {
501fb355 2968 rtx const_vec = gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0));
7a4eca66
DE
2969 if (mode != V4SFmode && all_const_zero)
2970 {
2971 /* Zero register. */
2972 emit_insn (gen_rtx_SET (VOIDmode, target,
2973 gen_rtx_XOR (mode, target, target)));
2974 return;
2975 }
501fb355 2976 else if (mode != V4SFmode && easy_vector_constant (const_vec, mode))
7a4eca66
DE
2977 {
2978 /* Splat immediate. */
501fb355 2979 emit_insn (gen_rtx_SET (VOIDmode, target, const_vec));
7a4eca66
DE
2980 return;
2981 }
2982 else if (all_same)
2983 ; /* Splat vector element. */
2984 else
2985 {
2986 /* Load from constant pool. */
501fb355 2987 emit_move_insn (target, const_vec);
7a4eca66
DE
2988 return;
2989 }
2990 }
2991
2992 /* Store value to stack temp. Load vector element. Splat. */
2993 if (all_same)
2994 {
2995 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2996 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2997 XVECEXP (vals, 0, 0));
2998 x = gen_rtx_UNSPEC (VOIDmode,
2999 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3000 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3001 gen_rtvec (2,
3002 gen_rtx_SET (VOIDmode,
3003 target, mem),
3004 x)));
3005 x = gen_rtx_VEC_SELECT (inner_mode, target,
3006 gen_rtx_PARALLEL (VOIDmode,
3007 gen_rtvec (1, const0_rtx)));
3008 emit_insn (gen_rtx_SET (VOIDmode, target,
3009 gen_rtx_VEC_DUPLICATE (mode, x)));
3010 return;
3011 }
3012
3013 /* One field is non-constant. Load constant then overwrite
3014 varying field. */
3015 if (n_var == 1)
3016 {
3017 rtx copy = copy_rtx (vals);
3018
57b51d4d 3019 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
3020 varying element. */
3021 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
3022 rs6000_expand_vector_init (target, copy);
3023
3024 /* Insert variable. */
3025 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
3026 return;
3027 }
3028
3029 /* Construct the vector in memory one field at a time
3030 and load the whole vector. */
3031 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3032 for (i = 0; i < n_elts; i++)
3033 emit_move_insn (adjust_address_nv (mem, inner_mode,
3034 i * GET_MODE_SIZE (inner_mode)),
3035 XVECEXP (vals, 0, i));
3036 emit_move_insn (target, mem);
3037}
3038
3039/* Set field ELT of TARGET to VAL. */
3040
3041void
3042rs6000_expand_vector_set (rtx target, rtx val, int elt)
3043{
3044 enum machine_mode mode = GET_MODE (target);
3045 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3046 rtx reg = gen_reg_rtx (mode);
3047 rtx mask, mem, x;
3048 int width = GET_MODE_SIZE (inner_mode);
3049 int i;
3050
3051 /* Load single variable value. */
3052 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3053 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
3054 x = gen_rtx_UNSPEC (VOIDmode,
3055 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3056 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3057 gen_rtvec (2,
3058 gen_rtx_SET (VOIDmode,
3059 reg, mem),
3060 x)));
3061
3062 /* Linear sequence. */
3063 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
3064 for (i = 0; i < 16; ++i)
3065 XVECEXP (mask, 0, i) = GEN_INT (i);
3066
3067 /* Set permute mask to insert element into target. */
3068 for (i = 0; i < width; ++i)
3069 XVECEXP (mask, 0, elt*width + i)
3070 = GEN_INT (i + 0x10);
3071 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3072 x = gen_rtx_UNSPEC (mode,
3073 gen_rtvec (3, target, reg,
3074 force_reg (V16QImode, x)),
3075 UNSPEC_VPERM);
3076 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3077}
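/* Example of the permute mask built above (illustrative): for a V4SImode
   TARGET and ELT == 1 (width 4), the mask becomes
     { 0,1,2,3, 16,17,18,19, 8,9,10,11, 12,13,14,15 },
   so the vperm keeps bytes 0-3 and 8-15 of TARGET and pulls bytes 16-19
   from REG, i.e. only element 1 of TARGET is replaced by the value just
   loaded from the stack temporary.  */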
3078
3079/* Extract field ELT from VEC into TARGET. */
3080
3081void
3082rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3083{
3084 enum machine_mode mode = GET_MODE (vec);
3085 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3086 rtx mem, x;
3087
3088 /* Allocate mode-sized buffer. */
3089 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3090
3091 /* Add offset to field within buffer matching vector element. */
3092 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3093
3094 /* Store single field into mode-sized buffer. */
3095 x = gen_rtx_UNSPEC (VOIDmode,
3096 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3097 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3098 gen_rtvec (2,
3099 gen_rtx_SET (VOIDmode,
3100 mem, vec),
3101 x)));
3102 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3103}
3104
0ba1b2ff
AM
3105/* Generates shifts and masks for a pair of rldicl or rldicr insns to
3106 implement ANDing by the mask IN. */
3107void
a2369ed3 3108build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
3109{
3110#if HOST_BITS_PER_WIDE_INT >= 64
3111 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3112 int shift;
3113
37409796 3114 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff 3115
3116 c = INTVAL (in);
3117 if (c & 1)
3118 {
3119 /* Assume c initially something like 0x00fff000000fffff. The idea
3120 is to rotate the word so that the middle ^^^^^^ group of zeros
3121 is at the MS end and can be cleared with an rldicl mask. We then
3122 rotate back and clear off the MS ^^ group of zeros with a
3123 second rldicl. */
3124 c = ~c; /* c == 0xff000ffffff00000 */
3125 lsb = c & -c; /* lsb == 0x0000000000100000 */
3126 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3127 c = ~c; /* c == 0x00fff000000fffff */
3128 c &= -lsb; /* c == 0x00fff00000000000 */
3129 lsb = c & -c; /* lsb == 0x0000100000000000 */
3130 c = ~c; /* c == 0xff000fffffffffff */
3131 c &= -lsb; /* c == 0xff00000000000000 */
3132 shift = 0;
3133 while ((lsb >>= 1) != 0)
3134 shift++; /* shift == 44 on exit from loop */
3135 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3136 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3137 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9 3138 }
3139 else
0ba1b2ff 3140 {
3141 /* Assume c initially something like 0xff000f0000000000. The idea
3142 is to rotate the word so that the ^^^ middle group of zeros
3143 is at the LS end and can be cleared with an rldicr mask. We then
3144 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3145 a second rldicr. */
3146 lsb = c & -c; /* lsb == 0x0000010000000000 */
3147 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3148 c = ~c; /* c == 0x00fff0ffffffffff */
3149 c &= -lsb; /* c == 0x00fff00000000000 */
3150 lsb = c & -c; /* lsb == 0x0000100000000000 */
3151 c = ~c; /* c == 0xff000fffffffffff */
3152 c &= -lsb; /* c == 0xff00000000000000 */
3153 shift = 0;
3154 while ((lsb >>= 1) != 0)
3155 shift++; /* shift == 44 on exit from loop */
3156 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3157 m1 >>= shift; /* m1 == 0x0000000000000fff */
3158 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3159 }
3160
3161 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3162 masks will be all 1's. We are guaranteed more than one transition. */
3163 out[0] = GEN_INT (64 - shift);
3164 out[1] = GEN_INT (m1);
3165 out[2] = GEN_INT (shift);
3166 out[3] = GEN_INT (m2);
3167#else
045572c7 3168 (void)in;
3169 (void)out;
37409796 3170 gcc_unreachable ();
0ba1b2ff 3171#endif
a260abc9 3172}
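/* Worked example for the first case above: for IN == 0x00fff000000fffff
   the outputs are out[0] == 20, out[1] == 0x000000ffffffffff,
   out[2] == 44 and out[3] == 0x00ffffffffffffff, and
   (((x rotl 20) & out[1]) rotl 44) & out[3] == x & IN, which is the
   rotate-and-mask pair the caller emits.  */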
3173
54b695e7 3174/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335 3175
3176bool
54b695e7 3177invalid_e500_subreg (rtx op, enum machine_mode mode)
3178{
61c76239 3179 if (TARGET_E500_DOUBLE)
3180 {
17caeff2 3181 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
3182 subreg:TI and reg:TF. */
61c76239 3183 if (GET_CODE (op) == SUBREG
17caeff2 3184 && (mode == SImode || mode == DImode || mode == TImode)
61c76239 3185 && REG_P (SUBREG_REG (op))
17caeff2 3186 && (GET_MODE (SUBREG_REG (op)) == DFmode
4d4447b5 3187 || GET_MODE (SUBREG_REG (op)) == TFmode
3188 || GET_MODE (SUBREG_REG (op)) == DDmode
3189 || GET_MODE (SUBREG_REG (op)) == TDmode))
61c76239 3190 return true;
3191
17caeff2 3192 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3193 reg:TI. */
61c76239 3194 if (GET_CODE (op) == SUBREG
4d4447b5 3195 && (mode == DFmode || mode == TFmode
3196 || mode == DDmode || mode == TDmode)
61c76239 3197 && REG_P (SUBREG_REG (op))
17caeff2 3198 && (GET_MODE (SUBREG_REG (op)) == DImode
3199 || GET_MODE (SUBREG_REG (op)) == TImode))
61c76239 3200 return true;
3201 }
54b695e7 3202
61c76239 3203 if (TARGET_SPE
3204 && GET_CODE (op) == SUBREG
3205 && mode == SImode
54b695e7 3206 && REG_P (SUBREG_REG (op))
14502dad 3207 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7 3208 return true;
3209
3210 return false;
3211}
3212
58182de3 3213/* AIX increases natural record alignment to doubleword if the first
95727fb8 3214 field is an FP double while the FP fields remain word aligned. */
3215
19d66194 3216unsigned int
fa5b0972 3217rs6000_special_round_type_align (tree type, unsigned int computed,
3218 unsigned int specified)
95727fb8 3219{
fa5b0972 3220 unsigned int align = MAX (computed, specified);
95727fb8 3221 tree field = TYPE_FIELDS (type);
95727fb8 3222
bb8df8a6 3223 /* Skip all non-field decls. */
85962ac8 3224 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8 3225 field = TREE_CHAIN (field);
3226
fa5b0972 3227 if (field != NULL && field != type)
3228 {
3229 type = TREE_TYPE (field);
3230 while (TREE_CODE (type) == ARRAY_TYPE)
3231 type = TREE_TYPE (type);
3232
3233 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3234 align = MAX (align, 64);
3235 }
95727fb8 3236
fa5b0972 3237 return align;
95727fb8 3238}
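/* For example, on AIX a record such as  struct { double d; int i; }
   is raised to 64-bit alignment here because its first field is a
   double, whereas  struct { int i; double d; }  is left with whatever
   alignment was already computed.  */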
3239
58182de3 3240/* Darwin increases record alignment to the natural alignment of
3241 the first field. */
3242
3243unsigned int
3244darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3245 unsigned int specified)
3246{
3247 unsigned int align = MAX (computed, specified);
3248
3249 if (TYPE_PACKED (type))
3250 return align;
3251
3252 /* Find the first field, looking down into aggregates. */
3253 do {
3254 tree field = TYPE_FIELDS (type);
3255 /* Skip all non-field decls. */
3256 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3257 field = TREE_CHAIN (field);
3258 if (! field)
3259 break;
3260 type = TREE_TYPE (field);
3261 while (TREE_CODE (type) == ARRAY_TYPE)
3262 type = TREE_TYPE (type);
3263 } while (AGGREGATE_TYPE_P (type));
3264
3265 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3266 align = MAX (align, TYPE_ALIGN (type));
3267
3268 return align;
3269}
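/* Illustrative example: for  struct { struct { double d; } inner; char c; }
   the loop above descends into 'inner', finds 'double' as the first
   non-aggregate field type, and (unless the record is packed) raises the
   record alignment to at least TYPE_ALIGN of double.  */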
3270
a4f6c312 3271/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3272
3273int
f676971a 3274small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3275 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3276{
38c1f2d7 3277#if TARGET_ELF
5f59ecb7 3278 rtx sym_ref;
7509c759 3279
d9407988 3280 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3281 return 0;
a54d04b7 3282
f607bc57 3283 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3284 return 0;
3285
2aa42e6e 3286 /* Vector and float memory instructions have a limited offset on the
3287 SPE, so using a vector or float variable directly as an operand is
3288 not useful. */
3289 if (TARGET_SPE
3290 && (SPE_VECTOR_MODE (mode) || FLOAT_MODE_P (mode)))
3291 return 0;
3292
88228c4b 3293 if (GET_CODE (op) == SYMBOL_REF
3294 sym_ref = op;
3295
3296 else if (GET_CODE (op) != CONST
3297 || GET_CODE (XEXP (op, 0)) != PLUS
3298 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3299 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759 3300 return 0;
3301
88228c4b 3302 else
dbf55e53 3303 {
3304 rtx sum = XEXP (op, 0);
3305 HOST_WIDE_INT summand;
3306
3307 /* We have to be careful here, because it is the referenced address
c4ad648e 3308 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3309 summand = INTVAL (XEXP (sum, 1));
307b599c 3310 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3311 return 0;
dbf55e53
MM
3312
3313 sym_ref = XEXP (sum, 0);
3314 }
88228c4b 3315
20bfcd69 3316 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3317#else
3318 return 0;
3319#endif
7509c759 3320}
46c07df8 3321
3a1f863f 3322/* Return true if either operand is a general purpose register. */
46c07df8 3323
3a1f863f
DE
3324bool
3325gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3326{
3a1f863f 3327 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3328 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8 3329}
3330
9ebbca7d 3331\f
4d588c14 3332/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3333
f676971a 3334static int
3335constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 3336{
9390387d 3337 switch (GET_CODE (op))
9ebbca7d
GK
3338 {
3339 case SYMBOL_REF:
c4501e62
JJ
3340 if (RS6000_SYMBOL_REF_TLS_P (op))
3341 return 0;
3342 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
3343 {
3344 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3345 {
3346 *have_sym = 1;
3347 return 1;
3348 }
3349 else
3350 return 0;
3351 }
3352 else if (! strcmp (XSTR (op, 0), toc_label_name))
3353 {
3354 *have_toc = 1;
3355 return 1;
3356 }
3357 else
3358 return 0;
9ebbca7d
GK
3359 case PLUS:
3360 case MINUS:
c1f11548
DE
3361 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3362 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 3363 case CONST:
a4f6c312 3364 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 3365 case CONST_INT:
a4f6c312 3366 return 1;
9ebbca7d 3367 default:
a4f6c312 3368 return 0;
9ebbca7d
GK
3369 }
3370}
3371
4d588c14 3372static bool
a2369ed3 3373constant_pool_expr_p (rtx op)
9ebbca7d
GK
3374{
3375 int have_sym = 0;
3376 int have_toc = 0;
3377 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3378}
3379
48d72335 3380bool
a2369ed3 3381toc_relative_expr_p (rtx op)
9ebbca7d 3382{
4d588c14
RH
3383 int have_sym = 0;
3384 int have_toc = 0;
3385 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3386}
3387
4d588c14 3388bool
a2369ed3 3389legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3390{
3391 return (TARGET_TOC
3392 && GET_CODE (x) == PLUS
3393 && GET_CODE (XEXP (x, 0)) == REG
3394 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3395 && constant_pool_expr_p (XEXP (x, 1)));
3396}
3397
d04b6e6e
EB
3398static bool
3399legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3400{
3401 return (DEFAULT_ABI == ABI_V4
3402 && !flag_pic && !TARGET_TOC
3403 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3404 && small_data_operand (x, mode));
3405}
3406
60cdabab 3407/* SPE offset addressing is limited to 5-bits worth of double words. */
3408#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
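/* In other words, the only offsets accepted are 0, 8, 16, ..., 248:
   bits 0..2 must be clear (double-word aligned) and no bit above bit 7
   may be set.  */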
3409
76d2b81d
DJ
3410bool
3411rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3412{
3413 unsigned HOST_WIDE_INT offset, extra;
3414
3415 if (GET_CODE (x) != PLUS)
3416 return false;
3417 if (GET_CODE (XEXP (x, 0)) != REG)
3418 return false;
3419 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3420 return false;
60cdabab
DE
3421 if (legitimate_constant_pool_address_p (x))
3422 return true;
4d588c14
RH
3423 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3424 return false;
3425
3426 offset = INTVAL (XEXP (x, 1));
3427 extra = 0;
3428 switch (mode)
3429 {
3430 case V16QImode:
3431 case V8HImode:
3432 case V4SFmode:
3433 case V4SImode:
7a4eca66 3434 /* AltiVec vector modes. Only reg+reg addressing is valid and
1a23970d 3435 constant offset zero should not occur due to canonicalization. */
3436 return false;
4d588c14
RH
3437
3438 case V4HImode:
3439 case V2SImode:
3440 case V1DImode:
3441 case V2SFmode:
d42a3bae 3442 /* Paired vector modes. Only reg+reg addressing is valid and
1a23970d 3443 constant offset zero should not occur due to canonicalization. */
d42a3bae 3444 if (TARGET_PAIRED_FLOAT)
1a23970d 3445 return false;
4d588c14
RH
3446 /* SPE vector modes. */
3447 return SPE_CONST_OFFSET_OK (offset);
3448
3449 case DFmode:
7393f7f8 3450 case DDmode:
4d4cbc0e
AH
3451 if (TARGET_E500_DOUBLE)
3452 return SPE_CONST_OFFSET_OK (offset);
3453
4d588c14 3454 case DImode:
54b695e7 3455 /* On e500v2, we may have:
3456
3457 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3458
3459 Which gets addressed with evldd instructions. */
3460 if (TARGET_E500_DOUBLE)
3461 return SPE_CONST_OFFSET_OK (offset);
3462
7393f7f8 3463 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3464 extra = 4;
3465 else if (offset & 3)
3466 return false;
3467 break;
3468
3469 case TFmode:
4d4447b5 3470 case TDmode:
17caeff2
JM
3471 if (TARGET_E500_DOUBLE)
3472 return (SPE_CONST_OFFSET_OK (offset)
3473 && SPE_CONST_OFFSET_OK (offset + 8));
3474
4d588c14 3475 case TImode:
7393f7f8 3476 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3477 extra = 12;
3478 else if (offset & 3)
3479 return false;
3480 else
3481 extra = 8;
3482 break;
3483
3484 default:
3485 break;
3486 }
3487
b1917422 3488 offset += 0x8000;
3489 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14 3490}
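/* Worked example of the final range check above: with EXTRA == 4
   (e.g. DImode without 64-bit GPRs) an offset of 32760 is accepted,
   but 32764 is not, because the second word would then sit at 32768,
   outside the signed 16-bit displacement range of d-form memory
   instructions.  */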
3491
6fb5fa3c 3492bool
a2369ed3 3493legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3494{
3495 rtx op0, op1;
3496
3497 if (GET_CODE (x) != PLUS)
3498 return false;
850e8d3d 3499
4d588c14
RH
3500 op0 = XEXP (x, 0);
3501 op1 = XEXP (x, 1);
3502
bf00cc0f 3503 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3504 replaced with proper base and index regs. */
3505 if (!strict
3506 && reload_in_progress
3507 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3508 && REG_P (op1))
3509 return true;
3510
3511 return (REG_P (op0) && REG_P (op1)
3512 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3513 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3514 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3515 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3516}
3517
48d72335 3518inline bool
a2369ed3 3519legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3520{
3521 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3522}
3523
48d72335 3524bool
4c81e946
FJ
3525macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3526{
c4ad648e 3527 if (!TARGET_MACHO || !flag_pic
9390387d 3528 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3529 return false;
3530 x = XEXP (x, 0);
4c81e946
FJ
3531
3532 if (GET_CODE (x) != LO_SUM)
3533 return false;
3534 if (GET_CODE (XEXP (x, 0)) != REG)
3535 return false;
3536 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3537 return false;
3538 x = XEXP (x, 1);
3539
3540 return CONSTANT_P (x);
3541}
3542
4d588c14 3543static bool
a2369ed3 3544legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3545{
3546 if (GET_CODE (x) != LO_SUM)
3547 return false;
3548 if (GET_CODE (XEXP (x, 0)) != REG)
3549 return false;
3550 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3551 return false;
54b695e7 3552 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2 3553 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 3554 || mode == DDmode || mode == TDmode
17caeff2 3555 || mode == DImode))
f82f556d 3556 return false;
4d588c14
RH
3557 x = XEXP (x, 1);
3558
8622e235 3559 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3560 {
a29077da 3561 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3562 return false;
3563 if (TARGET_TOC)
3564 return false;
3565 if (GET_MODE_NUNITS (mode) != 1)
3566 return false;
5e5f01b9 3567 if (GET_MODE_BITSIZE (mode) > 64
3c028f65 3568 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
4d4447b5
PB
3569 && !(TARGET_HARD_FLOAT && TARGET_FPRS
3570 && (mode == DFmode || mode == DDmode))))
4d588c14
RH
3571 return false;
3572
3573 return CONSTANT_P (x);
3574 }
3575
3576 return false;
3577}
3578
3579
9ebbca7d
GK
3580/* Try machine-dependent ways of modifying an illegitimate address
3581 to be legitimate. If we find one, return the new, valid address.
3582 This is used from only one place: `memory_address' in explow.c.
3583
a4f6c312
SS
3584 OLDX is the address as it was before break_out_memory_refs was
3585 called. In some cases it is useful to look at this to decide what
3586 needs to be done.
9ebbca7d 3587
a4f6c312 3588 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3589
a4f6c312
SS
3590 It is always safe for this function to do nothing. It exists to
3591 recognize opportunities to optimize the output.
9ebbca7d
GK
3592
3593 On RS/6000, first check for the sum of a register with a constant
3594 integer that is out of range. If so, generate code to add the
3595 constant with the low-order 16 bits masked to the register and force
3596 this result into another register (this can be done with `cau').
3597 Then generate an address of REG+(CONST&0xffff), allowing for the
3598 possibility of bit 16 being a one.
3599
3600 Then check for the sum of a register and something not constant, try to
3601 load the other things into a register and return the sum. */
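/* Worked example of the out-of-range constant case: for
   (plus r3 0x12345) the low part is ((0x2345 ^ 0x8000) - 0x8000)
   == 0x2345 and the high part is 0x10000, so r3 + 0x10000 is forced
   into a register (typically via addis) and the address returned is
   (plus tmp 0x2345), whose offset now fits in 16 signed bits.  */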
4d588c14 3602
9ebbca7d 3603rtx
a2369ed3
DJ
3604rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3605 enum machine_mode mode)
0ac081f6 3606{
c4501e62
JJ
3607 if (GET_CODE (x) == SYMBOL_REF)
3608 {
3609 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3610 if (model != 0)
3611 return rs6000_legitimize_tls_address (x, model);
3612 }
3613
f676971a 3614 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3615 && GET_CODE (XEXP (x, 0)) == REG
3616 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb
JM
3617 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
3618 && !(SPE_VECTOR_MODE (mode)
efc05e3c 3619 || ALTIVEC_VECTOR_MODE (mode)
3c1eb9eb
JM
3620 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3621 || mode == DImode))))
f676971a 3622 {
9ebbca7d
GK
3623 HOST_WIDE_INT high_int, low_int;
3624 rtx sum;
a65c591c
DE
3625 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3626 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3627 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3628 GEN_INT (high_int)), 0);
3629 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3630 }
f676971a 3631 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3632 && GET_CODE (XEXP (x, 0)) == REG
3633 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3634 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
3635 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3636 || TARGET_POWERPC64
efc05e3c
PB
3637 || ((mode != DImode && mode != DFmode && mode != DDmode)
3638 || TARGET_E500_DOUBLE))
9ebbca7d 3639 && (TARGET_POWERPC64 || mode != DImode)
efc05e3c
PB
3640 && mode != TImode
3641 && mode != TFmode
3642 && mode != TDmode)
9ebbca7d
GK
3643 {
3644 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3645 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3646 }
0ac081f6
AH
3647 else if (ALTIVEC_VECTOR_MODE (mode))
3648 {
3649 rtx reg;
3650
3651 /* Make sure both operands are registers. */
3652 if (GET_CODE (x) == PLUS)
9f85ed45 3653 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3654 force_reg (Pmode, XEXP (x, 1)));
3655
3656 reg = force_reg (Pmode, x);
3657 return reg;
3658 }
4d4cbc0e 3659 else if (SPE_VECTOR_MODE (mode)
17caeff2 3660 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3661 || mode == DDmode || mode == TDmode
54b695e7 3662 || mode == DImode)))
a3170dc6 3663 {
54b695e7
AH
3664 if (mode == DImode)
3665 return NULL_RTX;
a3170dc6
AH
3666 /* We accept [reg + reg] and [reg + OFFSET]. */
3667
3668 if (GET_CODE (x) == PLUS)
61dd226f
NF
3669 {
3670 rtx op1 = XEXP (x, 0);
3671 rtx op2 = XEXP (x, 1);
3672 rtx y;
3673
3674 op1 = force_reg (Pmode, op1);
3675
3676 if (GET_CODE (op2) != REG
3677 && (GET_CODE (op2) != CONST_INT
3678 || !SPE_CONST_OFFSET_OK (INTVAL (op2))
3679 || (GET_MODE_SIZE (mode) > 8
3680 && !SPE_CONST_OFFSET_OK (INTVAL (op2) + 8))))
3681 op2 = force_reg (Pmode, op2);
3682
3683 /* We can't always do [reg + reg] for these, because [reg +
3684 reg + offset] is not a legitimate addressing mode. */
3685 y = gen_rtx_PLUS (Pmode, op1, op2);
3686
3687 if (GET_MODE_SIZE (mode) > 8 && REG_P (op2))
3688 return force_reg (Pmode, y);
3689 else
3690 return y;
3691 }
a3170dc6
AH
3692
3693 return force_reg (Pmode, x);
3694 }
f1384257
AM
3695 else if (TARGET_ELF
3696 && TARGET_32BIT
3697 && TARGET_NO_TOC
3698 && ! flag_pic
9ebbca7d 3699 && GET_CODE (x) != CONST_INT
f676971a 3700 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3701 && CONSTANT_P (x)
6ac7bf2c
GK
3702 && GET_MODE_NUNITS (mode) == 1
3703 && (GET_MODE_BITSIZE (mode) <= 32
4d4447b5
PB
3704 || ((TARGET_HARD_FLOAT && TARGET_FPRS)
3705 && (mode == DFmode || mode == DDmode))))
9ebbca7d
GK
3706 {
3707 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3708 emit_insn (gen_elf_high (reg, x));
3709 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3710 }
ee890fe2
SS
3711 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3712 && ! flag_pic
ab82a49f
AP
3713#if TARGET_MACHO
3714 && ! MACHO_DYNAMIC_NO_PIC_P
3715#endif
ee890fe2 3716 && GET_CODE (x) != CONST_INT
f676971a 3717 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3718 && CONSTANT_P (x)
4d4447b5
PB
3719 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3720 || (mode != DFmode && mode != DDmode))
f676971a 3721 && mode != DImode
ee890fe2
SS
3722 && mode != TImode)
3723 {
3724 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3725 emit_insn (gen_macho_high (reg, x));
3726 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3727 }
f676971a 3728 else if (TARGET_TOC
4d588c14 3729 && constant_pool_expr_p (x)
a9098fd0 3730 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3731 {
3732 return create_TOC_reference (x);
3733 }
3734 else
3735 return NULL_RTX;
3736}
258bfae2 3737
fdbe66f2 3738/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3739 We need to emit DTP-relative relocations. */
3740
fdbe66f2 3741static void
c973d557
JJ
3742rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3743{
3744 switch (size)
3745 {
3746 case 4:
3747 fputs ("\t.long\t", file);
3748 break;
3749 case 8:
3750 fputs (DOUBLE_INT_ASM_OP, file);
3751 break;
3752 default:
37409796 3753 gcc_unreachable ();
c973d557
JJ
3754 }
3755 output_addr_const (file, x);
3756 fputs ("@dtprel+0x8000", file);
3757}
3758
c4501e62
JJ
3759/* Construct the SYMBOL_REF for the tls_get_addr function. */
3760
3761static GTY(()) rtx rs6000_tls_symbol;
3762static rtx
863d938c 3763rs6000_tls_get_addr (void)
c4501e62
JJ
3764{
3765 if (!rs6000_tls_symbol)
3766 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3767
3768 return rs6000_tls_symbol;
3769}
3770
3771/* Construct the SYMBOL_REF for TLS GOT references. */
3772
3773static GTY(()) rtx rs6000_got_symbol;
3774static rtx
863d938c 3775rs6000_got_sym (void)
c4501e62
JJ
3776{
3777 if (!rs6000_got_symbol)
3778 {
3779 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3780 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3781 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3782 }
c4501e62
JJ
3783
3784 return rs6000_got_symbol;
3785}
3786
3787/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3788 this (thread-local) address. */
3789
3790static rtx
a2369ed3 3791rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3792{
3793 rtx dest, insn;
3794
3795 dest = gen_reg_rtx (Pmode);
3796 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3797 {
3798 rtx tlsreg;
3799
3800 if (TARGET_64BIT)
3801 {
3802 tlsreg = gen_rtx_REG (Pmode, 13);
3803 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3804 }
3805 else
3806 {
3807 tlsreg = gen_rtx_REG (Pmode, 2);
3808 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3809 }
3810 emit_insn (insn);
3811 }
3812 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3813 {
3814 rtx tlsreg, tmp;
3815
3816 tmp = gen_reg_rtx (Pmode);
3817 if (TARGET_64BIT)
3818 {
3819 tlsreg = gen_rtx_REG (Pmode, 13);
3820 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3821 }
3822 else
3823 {
3824 tlsreg = gen_rtx_REG (Pmode, 2);
3825 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3826 }
3827 emit_insn (insn);
3828 if (TARGET_64BIT)
3829 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3830 else
3831 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3832 emit_insn (insn);
3833 }
3834 else
3835 {
3836 rtx r3, got, tga, tmp1, tmp2, eqv;
3837
4fed8f8f
AM
3838 /* We currently use relocations like @got@tlsgd for tls, which
3839 means the linker will handle allocation of tls entries, placing
3840 them in the .got section. So use a pointer to the .got section,
3841 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3842 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3843 if (TARGET_64BIT)
972f427b 3844 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3845 else
3846 {
3847 if (flag_pic == 1)
3848 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3849 else
3850 {
3851 rtx gsym = rs6000_got_sym ();
3852 got = gen_reg_rtx (Pmode);
3853 if (flag_pic == 0)
3854 rs6000_emit_move (got, gsym, Pmode);
3855 else
3856 {
e65a3857 3857 rtx tmp3, mem;
c4501e62
JJ
3858 rtx first, last;
3859
c4501e62
JJ
3860 tmp1 = gen_reg_rtx (Pmode);
3861 tmp2 = gen_reg_rtx (Pmode);
3862 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3863 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3864
e65a3857
DE
3865 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
3866 emit_move_insn (tmp1,
1de43f85 3867 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
3868 emit_move_insn (tmp2, mem);
3869 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3870 last = emit_move_insn (got, tmp3);
bd94cb6e 3871 set_unique_reg_note (last, REG_EQUAL, gsym);
6fb5fa3c 3872 maybe_encapsulate_block (first, last, gsym);
c4501e62
JJ
3873 }
3874 }
3875 }
3876
3877 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3878 {
3879 r3 = gen_rtx_REG (Pmode, 3);
3880 if (TARGET_64BIT)
3881 insn = gen_tls_gd_64 (r3, got, addr);
3882 else
3883 insn = gen_tls_gd_32 (r3, got, addr);
3884 start_sequence ();
3885 emit_insn (insn);
3886 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3887 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3888 insn = emit_call_insn (insn);
3889 CONST_OR_PURE_CALL_P (insn) = 1;
3890 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3891 insn = get_insns ();
3892 end_sequence ();
3893 emit_libcall_block (insn, dest, r3, addr);
3894 }
3895 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3896 {
3897 r3 = gen_rtx_REG (Pmode, 3);
3898 if (TARGET_64BIT)
3899 insn = gen_tls_ld_64 (r3, got);
3900 else
3901 insn = gen_tls_ld_32 (r3, got);
3902 start_sequence ();
3903 emit_insn (insn);
3904 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3905 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3906 insn = emit_call_insn (insn);
3907 CONST_OR_PURE_CALL_P (insn) = 1;
3908 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3909 insn = get_insns ();
3910 end_sequence ();
3911 tmp1 = gen_reg_rtx (Pmode);
3912 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3913 UNSPEC_TLSLD);
3914 emit_libcall_block (insn, tmp1, r3, eqv);
3915 if (rs6000_tls_size == 16)
3916 {
3917 if (TARGET_64BIT)
3918 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3919 else
3920 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3921 }
3922 else if (rs6000_tls_size == 32)
3923 {
3924 tmp2 = gen_reg_rtx (Pmode);
3925 if (TARGET_64BIT)
3926 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3927 else
3928 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3929 emit_insn (insn);
3930 if (TARGET_64BIT)
3931 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3932 else
3933 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3934 }
3935 else
3936 {
3937 tmp2 = gen_reg_rtx (Pmode);
3938 if (TARGET_64BIT)
3939 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3940 else
3941 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3942 emit_insn (insn);
3943 insn = gen_rtx_SET (Pmode, dest,
3944 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3945 }
3946 emit_insn (insn);
3947 }
3948 else
3949 {
a7b376ee 3950 /* IE, or 64-bit offset LE. */
c4501e62
JJ
3951 tmp2 = gen_reg_rtx (Pmode);
3952 if (TARGET_64BIT)
3953 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3954 else
3955 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3956 emit_insn (insn);
3957 if (TARGET_64BIT)
3958 insn = gen_tls_tls_64 (dest, tmp2, addr);
3959 else
3960 insn = gen_tls_tls_32 (dest, tmp2, addr);
3961 emit_insn (insn);
3962 }
3963 }
3964
3965 return dest;
3966}
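/* Rough summary of the sequences generated above (illustrative, not
   exhaustive): local-exec with -mtls-size=16 is a single add of
   sym@tprel to the thread pointer (r13 on 64-bit, r2 on 32-bit);
   initial-exec loads sym@got@tprel from the GOT/TOC and adds the
   thread pointer; global- and local-dynamic call __tls_get_addr.  */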
3967
c4501e62
JJ
3968/* Return 1 if X contains a thread-local symbol. */
3969
3970bool
a2369ed3 3971rs6000_tls_referenced_p (rtx x)
c4501e62 3972{
cd413cab
AP
3973 if (! TARGET_HAVE_TLS)
3974 return false;
3975
c4501e62
JJ
3976 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3977}
3978
3979/* Return 1 if *X is a thread-local symbol. This is the same as
3980 rs6000_tls_symbol_ref except for the type of the unused argument. */
3981
9390387d 3982static int
a2369ed3 3983rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
3984{
3985 return RS6000_SYMBOL_REF_TLS_P (*x);
3986}
3987
24ea750e
DJ
3988/* The convention appears to be to define this wherever it is used.
3989 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3990 is now used here. */
3991#ifndef REG_MODE_OK_FOR_BASE_P
3992#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3993#endif
3994
3995/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3996 replace the input X, or the original X if no replacement is called for.
3997 The output parameter *WIN is 1 if the calling macro should goto WIN,
3998 0 if it should not.
3999
4000 For RS/6000, we wish to handle large displacements off a base
4001 register by splitting the addend across an addi/addis pair and the mem insn.
4002 This cuts the number of extra insns needed from 3 to 1.
4003
4004 On Darwin, we use this to generate code for floating point constants.
4005 A movsf_low is generated so we wind up with 2 instructions rather than 3.
4006 The Darwin code is inside #if TARGET_MACHO because only then is
4007 machopic_function_base_name() defined. */
4008rtx
f676971a 4009rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
4010 int opnum, int type,
4011 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 4012{
f676971a 4013 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
4014 if (GET_CODE (x) == PLUS
4015 && GET_CODE (XEXP (x, 0)) == PLUS
4016 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4017 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4018 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4019 {
4020 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4021 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4022 opnum, (enum reload_type)type);
24ea750e
DJ
4023 *win = 1;
4024 return x;
4025 }
3deb2758 4026
24ea750e
DJ
4027#if TARGET_MACHO
4028 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
4029 && GET_CODE (x) == LO_SUM
4030 && GET_CODE (XEXP (x, 0)) == PLUS
4031 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
4032 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
4033 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
4034 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
4035 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
4036 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
4037 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
4038 {
4039 /* Result of previous invocation of this function on Darwin
6f317ef3 4040 floating point constant. */
24ea750e 4041 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4042 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4043 opnum, (enum reload_type)type);
24ea750e
DJ
4044 *win = 1;
4045 return x;
4046 }
4047#endif
4937d02d
DE
4048
4049 /* Force ld/std non-word aligned offset into base register by wrapping
4050 in offset 0. */
4051 if (GET_CODE (x) == PLUS
4052 && GET_CODE (XEXP (x, 0)) == REG
4053 && REGNO (XEXP (x, 0)) < 32
4054 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
4055 && GET_CODE (XEXP (x, 1)) == CONST_INT
4056 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 4057 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
4058 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
4059 && TARGET_POWERPC64)
4060 {
4061 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
4062 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4063 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4064 opnum, (enum reload_type) type);
4065 *win = 1;
4066 return x;
4067 }
4068
24ea750e
DJ
4069 if (GET_CODE (x) == PLUS
4070 && GET_CODE (XEXP (x, 0)) == REG
4071 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
4072 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 4073 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 4074 && !SPE_VECTOR_MODE (mode)
17caeff2 4075 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 4076 || mode == DDmode || mode == TDmode
54b695e7 4077 || mode == DImode))
78c875e8 4078 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
4079 {
4080 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
4081 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
4082 HOST_WIDE_INT high
c4ad648e 4083 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
4084
4085 /* Check for 32-bit overflow. */
4086 if (high + low != val)
c4ad648e 4087 {
24ea750e
DJ
4088 *win = 0;
4089 return x;
4090 }
4091
4092 /* Reload the high part into a base reg; leave the low part
c4ad648e 4093 in the mem directly. */
24ea750e
DJ
4094
4095 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
4096 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4097 GEN_INT (high)),
4098 GEN_INT (low));
24ea750e
DJ
4099
4100 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4101 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4102 opnum, (enum reload_type)type);
24ea750e
DJ
4103 *win = 1;
4104 return x;
4105 }
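/* Worked example of the split above: for (plus r9 0x12348000) we get
   low == -0x8000 and high == 0x12350000 (the low part is sign-extended,
   so high + low still equals the original value); the inner
   (plus r9 0x12350000) is reloaded into a base register and the
   -0x8000 displacement stays in the memory reference.  */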
4937d02d 4106
24ea750e 4107 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 4108 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 4109 && !SPE_VECTOR_MODE (mode)
8308679f
DE
4110#if TARGET_MACHO
4111 && DEFAULT_ABI == ABI_DARWIN
a29077da 4112 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
4113#else
4114 && DEFAULT_ABI == ABI_V4
4115 && !flag_pic
4116#endif
7393f7f8 4117 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
4d4447b5 4118 The same goes for DImode without 64-bit gprs and DFmode and DDmode
7b5d92b2 4119 without fprs. */
0d8c1c97 4120 && mode != TFmode
7393f7f8 4121 && mode != TDmode
7b5d92b2 4122 && (mode != DImode || TARGET_POWERPC64)
4d4447b5 4123 && ((mode != DFmode && mode != DDmode) || TARGET_POWERPC64
7b5d92b2 4124 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
24ea750e 4125 {
8308679f 4126#if TARGET_MACHO
a29077da
GK
4127 if (flag_pic)
4128 {
4129 rtx offset = gen_rtx_CONST (Pmode,
4130 gen_rtx_MINUS (Pmode, x,
11abc112 4131 machopic_function_base_sym ()));
a29077da
GK
4132 x = gen_rtx_LO_SUM (GET_MODE (x),
4133 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4134 gen_rtx_HIGH (Pmode, offset)), offset);
4135 }
4136 else
8308679f 4137#endif
a29077da 4138 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 4139 gen_rtx_HIGH (Pmode, x), x);
a29077da 4140
24ea750e 4141 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
4142 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4143 opnum, (enum reload_type)type);
24ea750e
DJ
4144 *win = 1;
4145 return x;
4146 }
4937d02d 4147
dec1f3aa
DE
4148 /* Reload an offset address wrapped by an AND that represents the
4149 masking of the lower bits. Strip the outer AND and let reload
4150 convert the offset address into an indirect address. */
4151 if (TARGET_ALTIVEC
4152 && ALTIVEC_VECTOR_MODE (mode)
4153 && GET_CODE (x) == AND
4154 && GET_CODE (XEXP (x, 0)) == PLUS
4155 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4156 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4157 && GET_CODE (XEXP (x, 1)) == CONST_INT
4158 && INTVAL (XEXP (x, 1)) == -16)
4159 {
4160 x = XEXP (x, 0);
4161 *win = 1;
4162 return x;
4163 }
4164
24ea750e 4165 if (TARGET_TOC
4d588c14 4166 && constant_pool_expr_p (x)
c1f11548 4167 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 4168 {
194c524a 4169 x = create_TOC_reference (x);
24ea750e
DJ
4170 *win = 1;
4171 return x;
4172 }
4173 *win = 0;
4174 return x;
f676971a 4175}
24ea750e 4176
258bfae2
FS
4177/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
4178 that is a valid memory address for an instruction.
4179 The MODE argument is the machine mode for the MEM expression
4180 that wants to use this address.
4181
4182 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
4183 refers to a constant pool entry of an address (or the sum of it
4184 plus a constant), a short (16-bit signed) constant plus a register,
4185 the sum of two registers, or a register indirect, possibly with an
4d4447b5 4186 auto-increment. For DFmode, DDmode and DImode with a constant plus
4187 register, we must ensure that both words are addressable or PowerPC64
4188 with offset word aligned.
258bfae2 4189
4d4447b5 4190 For modes spanning multiple registers (DFmode and DDmode in 32-bit GPRs,
7393f7f8 4191 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4192 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2 4193 during assembly output. */
4194int
a2369ed3 4195rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 4196{
850e8d3d 4197 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4198 if (TARGET_ALTIVEC
4199 && ALTIVEC_VECTOR_MODE (mode)
4200 && GET_CODE (x) == AND
4201 && GET_CODE (XEXP (x, 1)) == CONST_INT
4202 && INTVAL (XEXP (x, 1)) == -16)
4203 x = XEXP (x, 0);
4204
c4501e62
JJ
4205 if (RS6000_SYMBOL_REF_TLS_P (x))
4206 return 0;
4d588c14 4207 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
4208 return 1;
4209 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4210 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4211 && !SPE_VECTOR_MODE (mode)
429ec7dc 4212 && mode != TFmode
7393f7f8 4213 && mode != TDmode
54b695e7 4214 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4215 && !(TARGET_E500_DOUBLE
4216 && (mode == DFmode || mode == DDmode || mode == DImode))
258bfae2 4217 && TARGET_UPDATE
4d588c14 4218 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4219 return 1;
d04b6e6e 4220 if (legitimate_small_data_p (mode, x))
258bfae2 4221 return 1;
4d588c14 4222 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
4223 return 1;
4224 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4225 if (! reg_ok_strict
4226 && GET_CODE (x) == PLUS
4227 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4228 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4229 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
4230 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4231 return 1;
76d2b81d 4232 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4233 return 1;
4234 if (mode != TImode
76d2b81d 4235 && mode != TFmode
7393f7f8 4236 && mode != TDmode
a3170dc6
AH
4237 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4238 || TARGET_POWERPC64
4d4447b5 4239 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
258bfae2 4240 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 4241 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4242 return 1;
6fb5fa3c
DB
4243 if (GET_CODE (x) == PRE_MODIFY
4244 && mode != TImode
4245 && mode != TFmode
4246 && mode != TDmode
4247 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4248 || TARGET_POWERPC64
4d4447b5 4249 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
6fb5fa3c
DB
4250 && (TARGET_POWERPC64 || mode != DImode)
4251 && !ALTIVEC_VECTOR_MODE (mode)
4252 && !SPE_VECTOR_MODE (mode)
4253 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4254 && !(TARGET_E500_DOUBLE
4255 && (mode == DFmode || mode == DDmode || mode == DImode))
6fb5fa3c
DB
4256 && TARGET_UPDATE
4257 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4258 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
4259 || legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
4260 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4261 return 1;
4d588c14 4262 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4263 return 1;
4264 return 0;
4265}
4d588c14
RH
4266
4267/* Go to LABEL if ADDR (a legitimate address expression)
4268 has an effect that depends on the machine mode it is used for.
4269
4270 On the RS/6000 this is true of all integral offsets (since AltiVec
4271 modes don't allow them) or is a pre-increment or decrement.
4272
4273 ??? Except that due to conceptual problems in offsettable_address_p
4274 we can't really report the problems of integral offsets. So leave
f676971a 4275 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4276 sub-words of a TFmode operand, which is what we had before. */
4277
4278bool
a2369ed3 4279rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4280{
4281 switch (GET_CODE (addr))
4282 {
4283 case PLUS:
4284 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4285 {
4286 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4287 return val + 12 + 0x8000 >= 0x10000;
4288 }
4289 break;
4290
4291 case LO_SUM:
4292 return true;
4293
6fb5fa3c
DB
4294 case PRE_INC:
4295 case PRE_DEC:
4296 case PRE_MODIFY:
4297 return TARGET_UPDATE;
4d588c14
RH
4298
4299 default:
4300 break;
4301 }
4302
4303 return false;
4304}
d8ecbcdb 4305
d04b6e6e
EB
4306/* More elaborate version of recog's offsettable_memref_p predicate
4307 that works around the ??? note of rs6000_mode_dependent_address.
4308 In particular it accepts
4309
4310 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4311
4312 in 32-bit mode, which the recog predicate rejects. */
4313
4314bool
4315rs6000_offsettable_memref_p (rtx op)
4316{
4317 if (!MEM_P (op))
4318 return false;
4319
4320 /* First mimic offsettable_memref_p. */
4321 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4322 return true;
4323
4324 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4325 the latter predicate knows nothing about the mode of the memory
4326 reference and, therefore, assumes that it is the largest supported
4327 mode (TFmode). As a consequence, legitimate offsettable memory
4328 references are rejected. rs6000_legitimate_offset_address_p contains
4329 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4330 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4331}
4332
d8ecbcdb
AH
4333/* Return number of consecutive hard regs needed starting at reg REGNO
4334 to hold something of mode MODE.
4335 This is ordinarily the length in words of a value of mode MODE
4336 but can be less for certain modes in special long registers.
4337
4338 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4339 scalar instructions. The upper 32 bits are only available to the
4340 SIMD instructions.
4341
4342 POWER and PowerPC GPRs hold 32 bits worth;
4343 PowerPC64 GPRs and FPRs hold 64 bits worth. */
4344
4345int
4346rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4347{
4348 if (FP_REGNO_P (regno))
4349 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4350
4351 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4352 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4353
4354 if (ALTIVEC_REGNO_P (regno))
4355 return
4356 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4357
8521c414
JM
4358 /* The value returned for SCmode in the E500 double case is 2 for
4359 ABI compatibility; storing an SCmode value in a single register
4360 would require function_arg and rs6000_spe_function_arg to handle
4361 SCmode so as to pass the value correctly in a pair of
4362 registers. */
4363 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
4364 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4365
d8ecbcdb
AH
4366 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4367}
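/* For instance, a DFmode value (8 bytes) needs one FPR but two GPRs on
   a 32-bit target (leaving aside the E500 double case above), and a
   V4SImode value fits in a single AltiVec register; the formulas simply
   round the mode size up to the relevant register width.  */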
2aa4498c
AH
4368
4369/* Change register usage conditional on target flags. */
4370void
4371rs6000_conditional_register_usage (void)
4372{
4373 int i;
4374
4375 /* Set MQ register fixed (already call_used) if not POWER
4376 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4377 be allocated. */
4378 if (! TARGET_POWER)
4379 fixed_regs[64] = 1;
4380
7c9ac5c0 4381 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4382 if (TARGET_64BIT)
4383 fixed_regs[13] = call_used_regs[13]
4384 = call_really_used_regs[13] = 1;
4385
4386 /* Conditionally disable FPRs. */
4387 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4388 for (i = 32; i < 64; i++)
4389 fixed_regs[i] = call_used_regs[i]
c4ad648e 4390 = call_really_used_regs[i] = 1;
2aa4498c 4391
7c9ac5c0
PH
4392 /* The TOC register is not killed across calls in a way that is
4393 visible to the compiler. */
4394 if (DEFAULT_ABI == ABI_AIX)
4395 call_really_used_regs[2] = 0;
4396
2aa4498c
AH
4397 if (DEFAULT_ABI == ABI_V4
4398 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4399 && flag_pic == 2)
4400 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4401
4402 if (DEFAULT_ABI == ABI_V4
4403 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4404 && flag_pic == 1)
4405 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4406 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4407 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4408
4409 if (DEFAULT_ABI == ABI_DARWIN
4410 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4411 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4412 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4413 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4414
b4db40bf
JJ
4415 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4416 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4417 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4418
2aa4498c
AH
4419 if (TARGET_SPE)
4420 {
4421 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4422 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4423 registers in prologues and epilogues. We no longer use r14
4424 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4425 pool for link-compatibility with older versions of GCC. Once
4426 "old" code has died out, we can return r14 to the allocation
4427 pool. */
4428 fixed_regs[14]
4429 = call_used_regs[14]
4430 = call_really_used_regs[14] = 1;
2aa4498c
AH
4431 }
4432
0db747be 4433 if (!TARGET_ALTIVEC)
2aa4498c
AH
4434 {
4435 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4436 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4437 call_really_used_regs[VRSAVE_REGNO] = 1;
4438 }
4439
0db747be
DE
4440 if (TARGET_ALTIVEC)
4441 global_regs[VSCR_REGNO] = 1;
4442
2aa4498c 4443 if (TARGET_ALTIVEC_ABI)
0db747be
DE
4444 {
4445 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4446 call_used_regs[i] = call_really_used_regs[i] = 1;
4447
4448 /* AIX reserves VR20:31 in non-extended ABI mode. */
4449 if (TARGET_XCOFF)
4450 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
4451 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4452 }
2aa4498c 4453}
fb4d4348 4454\f
a4f6c312
SS
4455/* Try to output insns to set TARGET equal to the constant C if it can
4456 be done in less than N insns. Do all computations in MODE.
4457 Returns the place where the output has been placed if it can be
4458 done and the insns have been emitted. If it would take more than N
4459 insns, zero is returned and no insns and emitted. */
2bfcf297
DB
4460
4461rtx
f676971a 4462rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4463 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4464{
af8cb5c5 4465 rtx result, insn, set;
2bfcf297
DB
4466 HOST_WIDE_INT c0, c1;
4467
37409796 4468 switch (mode)
2bfcf297 4469 {
37409796
NS
4470 case QImode:
4471 case HImode:
2bfcf297 4472 if (dest == NULL)
c4ad648e 4473 dest = gen_reg_rtx (mode);
2bfcf297
DB
4474 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4475 return dest;
bb8df8a6 4476
37409796 4477 case SImode:
b3a13419 4478 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4479
d448860e 4480 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4481 GEN_INT (INTVAL (source)
4482 & (~ (HOST_WIDE_INT) 0xffff))));
4483 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4484 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4485 GEN_INT (INTVAL (source) & 0xffff))));
4486 result = dest;
37409796
NS
4487 break;
4488
4489 case DImode:
4490 switch (GET_CODE (source))
af8cb5c5 4491 {
37409796 4492 case CONST_INT:
af8cb5c5
DE
4493 c0 = INTVAL (source);
4494 c1 = -(c0 < 0);
37409796 4495 break;
bb8df8a6 4496
37409796 4497 case CONST_DOUBLE:
2bfcf297 4498#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4499 c0 = CONST_DOUBLE_LOW (source);
4500 c1 = -(c0 < 0);
2bfcf297 4501#else
af8cb5c5
DE
4502 c0 = CONST_DOUBLE_LOW (source);
4503 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4504#endif
37409796
NS
4505 break;
4506
4507 default:
4508 gcc_unreachable ();
af8cb5c5 4509 }
af8cb5c5
DE
4510
4511 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4512 break;
4513
4514 default:
4515 gcc_unreachable ();
2bfcf297 4516 }
2bfcf297 4517
af8cb5c5
DE
4518 insn = get_last_insn ();
4519 set = single_set (insn);
4520 if (! CONSTANT_P (SET_SRC (set)))
4521 set_unique_reg_note (insn, REG_EQUAL, source);
4522
4523 return result;
2bfcf297
DB
4524}
4525
4526/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4527 fall back to a straightforward decomposition. We do this to avoid
4528 exponential run times encountered when looking for longer sequences
4529 with rs6000_emit_set_const. */
4530static rtx
a2369ed3 4531rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4532{
4533 if (!TARGET_POWERPC64)
4534 {
4535 rtx operand1, operand2;
4536
4537 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4538 DImode);
d448860e 4539 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4540 DImode);
4541 emit_move_insn (operand1, GEN_INT (c1));
4542 emit_move_insn (operand2, GEN_INT (c2));
4543 }
4544 else
4545 {
bc06712d 4546 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4547
bc06712d 4548 ud1 = c1 & 0xffff;
f921c9c9 4549 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4550#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4551 c2 = c1 >> 32;
2bfcf297 4552#endif
bc06712d 4553 ud3 = c2 & 0xffff;
f921c9c9 4554 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4555
f676971a 4556 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4557 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4558 {
bc06712d 4559 if (ud1 & 0x8000)
b78d48dd 4560 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4561 else
4562 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4563 }
2bfcf297 4564
f676971a 4565 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4566 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4567 {
bc06712d 4568 if (ud2 & 0x8000)
f676971a 4569 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4570 - 0x80000000));
252b88f7 4571 else
bc06712d
TR
4572 emit_move_insn (dest, GEN_INT (ud2 << 16));
4573 if (ud1 != 0)
d448860e
JH
4574 emit_move_insn (copy_rtx (dest),
4575 gen_rtx_IOR (DImode, copy_rtx (dest),
4576 GEN_INT (ud1)));
252b88f7 4577 }
f676971a 4578 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4579 || (ud4 == 0 && ! (ud3 & 0x8000)))
4580 {
4581 if (ud3 & 0x8000)
f676971a 4582 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4583 - 0x80000000));
4584 else
4585 emit_move_insn (dest, GEN_INT (ud3 << 16));
4586
4587 if (ud2 != 0)
d448860e
JH
4588 emit_move_insn (copy_rtx (dest),
4589 gen_rtx_IOR (DImode, copy_rtx (dest),
4590 GEN_INT (ud2)));
4591 emit_move_insn (copy_rtx (dest),
4592 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4593 GEN_INT (16)));
bc06712d 4594 if (ud1 != 0)
d448860e
JH
4595 emit_move_insn (copy_rtx (dest),
4596 gen_rtx_IOR (DImode, copy_rtx (dest),
4597 GEN_INT (ud1)));
bc06712d 4598 }
f676971a 4599 else
bc06712d
TR
4600 {
4601 if (ud4 & 0x8000)
f676971a 4602 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4603 - 0x80000000));
4604 else
4605 emit_move_insn (dest, GEN_INT (ud4 << 16));
4606
4607 if (ud3 != 0)
d448860e
JH
4608 emit_move_insn (copy_rtx (dest),
4609 gen_rtx_IOR (DImode, copy_rtx (dest),
4610 GEN_INT (ud3)));
2bfcf297 4611
d448860e
JH
4612 emit_move_insn (copy_rtx (dest),
4613 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4614 GEN_INT (32)));
bc06712d 4615 if (ud2 != 0)
d448860e
JH
4616 emit_move_insn (copy_rtx (dest),
4617 gen_rtx_IOR (DImode, copy_rtx (dest),
4618 GEN_INT (ud2 << 16)));
bc06712d 4619 if (ud1 != 0)
d448860e
JH
4620 emit_move_insn (copy_rtx (dest),
4621 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4622 }
4623 }
2bfcf297
DB
4624 return dest;
4625}
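/* Worked example of the 64-bit path above: c == 0x123456789abcdef0
   falls through to the final case and is built roughly as
     lis r,0x1234; ori r,r,0x5678; sldi r,r,32;
     oris r,r,0x9abc; ori r,r,0xdef0
   i.e. five instructions, matching the IOR/ASHIFT rtl emitted above.  */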
4626
76d2b81d 4627/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4628 in cases where it won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4629
4630static void
4631rs6000_eliminate_indexed_memrefs (rtx operands[2])
4632{
4633 if (GET_CODE (operands[0]) == MEM
4634 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4635 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4636 && ! reload_in_progress)
4637 operands[0]
4638 = replace_equiv_address (operands[0],
4639 copy_addr_to_reg (XEXP (operands[0], 0)));
4640
4641 if (GET_CODE (operands[1]) == MEM
4642 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4643 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4644 && ! reload_in_progress)
4645 operands[1]
4646 = replace_equiv_address (operands[1],
4647 copy_addr_to_reg (XEXP (operands[1], 0)));
4648}
4649
fb4d4348
GK
4650/* Emit a move from SOURCE to DEST in mode MODE. */
4651void
a2369ed3 4652rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4653{
4654 rtx operands[2];
4655 operands[0] = dest;
4656 operands[1] = source;
f676971a 4657
fb4d4348
GK
4658 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4659 if (GET_CODE (operands[1]) == CONST_DOUBLE
4660 && ! FLOAT_MODE_P (mode)
4661 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4662 {
4663 /* FIXME. This should never happen. */
4664 /* Since it seems that it does, do the safe thing and convert
4665 to a CONST_INT. */
2496c7bd 4666 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4667 }
37409796
NS
4668 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4669 || FLOAT_MODE_P (mode)
4670 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4671 || CONST_DOUBLE_LOW (operands[1]) < 0)
4672 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4673 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4674
c9e8cb32
DD
4675 /* Check if GCC is setting up a block move that will end up using FP
4676 registers as temporaries. We must make sure this is acceptable. */
4677 if (GET_CODE (operands[0]) == MEM
4678 && GET_CODE (operands[1]) == MEM
4679 && mode == DImode
41543739
GK
4680 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4681 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4682 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4683 ? 32 : MEM_ALIGN (operands[0])))
4684 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4685 ? 32
41543739
GK
4686 : MEM_ALIGN (operands[1]))))
4687 && ! MEM_VOLATILE_P (operands [0])
4688 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4689 {
41543739
GK
4690 emit_move_insn (adjust_address (operands[0], SImode, 0),
4691 adjust_address (operands[1], SImode, 0));
d448860e
JH
4692 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4693 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4694 return;
4695 }
630d42a0 4696
b3a13419 4697 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4698 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4699 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4700
a3170dc6
AH
4701 if (mode == SFmode && ! TARGET_POWERPC
4702 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 4703 && GET_CODE (operands[0]) == MEM)
fb4d4348 4704 {
ffc14f31
GK
4705 int regnum;
4706
4707 if (reload_in_progress || reload_completed)
4708 regnum = true_regnum (operands[1]);
4709 else if (GET_CODE (operands[1]) == REG)
4710 regnum = REGNO (operands[1]);
4711 else
4712 regnum = -1;
f676971a 4713
fb4d4348
GK
4714 /* If operands[1] is a register, on POWER it may have
4715 double-precision data in it, so truncate it to single
4716 precision. */
4717 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4718 {
4719 rtx newreg;
b3a13419 4720 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4721 : gen_reg_rtx (mode));
fb4d4348
GK
4722 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4723 operands[1] = newreg;
4724 }
4725 }
4726
c4501e62
JJ
4727 /* Recognize the case where operand[1] is a reference to thread-local
4728 data and load its address to a register. */
84f52ebd 4729 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4730 {
84f52ebd
RH
4731 enum tls_model model;
4732 rtx tmp = operands[1];
4733 rtx addend = NULL;
4734
4735 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4736 {
4737 addend = XEXP (XEXP (tmp, 0), 1);
4738 tmp = XEXP (XEXP (tmp, 0), 0);
4739 }
4740
4741 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4742 model = SYMBOL_REF_TLS_MODEL (tmp);
4743 gcc_assert (model != 0);
4744
4745 tmp = rs6000_legitimize_tls_address (tmp, model);
4746 if (addend)
4747 {
4748 tmp = gen_rtx_PLUS (mode, tmp, addend);
4749 tmp = force_operand (tmp, operands[0]);
4750 }
4751 operands[1] = tmp;
c4501e62
JJ
4752 }
4753
8f4e6caf
RH
4754 /* Handle the case where reload calls us with an invalid address. */
4755 if (reload_in_progress && mode == Pmode
69ef87e2 4756 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4757 || ! nonimmediate_operand (operands[0], mode)))
4758 goto emit_set;
4759
a9baceb1
GK
4760 /* 128-bit constant floating-point values on Darwin should really be
4761 loaded as two parts. */
8521c414 4762 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
4763 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4764 {
4765 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4766 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
4767 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
4768 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
4769 simplify_gen_subreg (imode, operands[1], mode, 0),
4770 imode);
4771 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
4772 GET_MODE_SIZE (imode)),
4773 simplify_gen_subreg (imode, operands[1], mode,
4774 GET_MODE_SIZE (imode)),
4775 imode);
a9baceb1
GK
4776 return;
4777 }
4778
e41b2a33
PB
4779 if (reload_in_progress && cfun->machine->sdmode_stack_slot != NULL_RTX)
4780 cfun->machine->sdmode_stack_slot =
4781 eliminate_regs (cfun->machine->sdmode_stack_slot, VOIDmode, NULL_RTX);
4782
4783 if (reload_in_progress
4784 && mode == SDmode
4785 && MEM_P (operands[0])
4786 && rtx_equal_p (operands[0], cfun->machine->sdmode_stack_slot)
4787 && REG_P (operands[1]))
4788 {
4789 if (FP_REGNO_P (REGNO (operands[1])))
4790 {
4791 rtx mem = adjust_address_nv (operands[0], DDmode, 0);
4792 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4793 emit_insn (gen_movsd_store (mem, operands[1]));
4794 }
4795 else if (INT_REGNO_P (REGNO (operands[1])))
4796 {
4797 rtx mem = adjust_address_nv (operands[0], mode, 4);
4798 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4799 emit_insn (gen_movsd_hardfloat (mem, operands[1]));
4800 }
4801 else
4802 gcc_unreachable ();
4803 return;
4804 }
4805 if (reload_in_progress
4806 && mode == SDmode
4807 && REG_P (operands[0])
4808 && MEM_P (operands[1])
4809 && rtx_equal_p (operands[1], cfun->machine->sdmode_stack_slot))
4810 {
4811 if (FP_REGNO_P (REGNO (operands[0])))
4812 {
4813 rtx mem = adjust_address_nv (operands[1], DDmode, 0);
4814 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4815 emit_insn (gen_movsd_load (operands[0], mem));
4816 }
4817 else if (INT_REGNO_P (REGNO (operands[0])))
4818 {
4819 rtx mem = adjust_address_nv (operands[1], mode, 4);
4820 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4821 emit_insn (gen_movsd_hardfloat (operands[0], mem));
4822 }
4823 else
4824 gcc_unreachable ();
4825 return;
4826 }
4827
fb4d4348
GK
4828 /* FIXME: In the long term, this switch statement should go away
4829 and be replaced by a sequence of tests based on things like
4830 mode == Pmode. */
4831 switch (mode)
4832 {
4833 case HImode:
4834 case QImode:
4835 if (CONSTANT_P (operands[1])
4836 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 4837 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
4838 break;
4839
06f4e019 4840 case TFmode:
7393f7f8 4841 case TDmode:
76d2b81d
DJ
4842 rs6000_eliminate_indexed_memrefs (operands);
4843 /* fall through */
4844
fb4d4348 4845 case DFmode:
7393f7f8 4846 case DDmode:
fb4d4348 4847 case SFmode:
e41b2a33 4848 case SDmode:
f676971a 4849 if (CONSTANT_P (operands[1])
fb4d4348 4850 && ! easy_fp_constant (operands[1], mode))
a9098fd0 4851 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4852 break;
f676971a 4853
0ac081f6
AH
4854 case V16QImode:
4855 case V8HImode:
4856 case V4SFmode:
4857 case V4SImode:
a3170dc6
AH
4858 case V4HImode:
4859 case V2SFmode:
4860 case V2SImode:
00a892b8 4861 case V1DImode:
69ef87e2 4862 if (CONSTANT_P (operands[1])
d744e06e 4863 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
4864 operands[1] = force_const_mem (mode, operands[1]);
4865 break;
f676971a 4866
fb4d4348 4867 case SImode:
a9098fd0 4868 case DImode:
fb4d4348
GK
4869 /* Use default pattern for address of ELF small data. */
4870 if (TARGET_ELF
a9098fd0 4871 && mode == Pmode
f607bc57 4872 && DEFAULT_ABI == ABI_V4
f676971a 4873 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
4874 || GET_CODE (operands[1]) == CONST)
4875 && small_data_operand (operands[1], mode))
fb4d4348
GK
4876 {
4877 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4878 return;
4879 }
4880
f607bc57 4881 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
4882 && mode == Pmode && mode == SImode
4883 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
4884 {
4885 emit_insn (gen_movsi_got (operands[0], operands[1]));
4886 return;
4887 }
4888
ee890fe2 4889 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
4890 && TARGET_NO_TOC
4891 && ! flag_pic
a9098fd0 4892 && mode == Pmode
fb4d4348
GK
4893 && CONSTANT_P (operands[1])
4894 && GET_CODE (operands[1]) != HIGH
4895 && GET_CODE (operands[1]) != CONST_INT)
4896 {
b3a13419
ILT
4897 rtx target = (!can_create_pseudo_p ()
4898 ? operands[0]
4899 : gen_reg_rtx (mode));
fb4d4348
GK
4900
4901 /* If this is a function address on -mcall-aixdesc,
4902 convert it to the address of the descriptor. */
4903 if (DEFAULT_ABI == ABI_AIX
4904 && GET_CODE (operands[1]) == SYMBOL_REF
4905 && XSTR (operands[1], 0)[0] == '.')
4906 {
4907 const char *name = XSTR (operands[1], 0);
4908 rtx new_ref;
4909 while (*name == '.')
4910 name++;
4911 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4912 CONSTANT_POOL_ADDRESS_P (new_ref)
4913 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 4914 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 4915 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 4916 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
4917 operands[1] = new_ref;
4918 }
7509c759 4919
ee890fe2
SS
4920 if (DEFAULT_ABI == ABI_DARWIN)
4921 {
ab82a49f
AP
4922#if TARGET_MACHO
4923 if (MACHO_DYNAMIC_NO_PIC_P)
4924 {
4925 /* Take care of any required data indirection. */
4926 operands[1] = rs6000_machopic_legitimize_pic_address (
4927 operands[1], mode, operands[0]);
4928 if (operands[0] != operands[1])
4929 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 4930 operands[0], operands[1]));
ab82a49f
AP
4931 return;
4932 }
4933#endif
b8a55285
AP
4934 emit_insn (gen_macho_high (target, operands[1]));
4935 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
4936 return;
4937 }
4938
fb4d4348
GK
4939 emit_insn (gen_elf_high (target, operands[1]));
4940 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4941 return;
4942 }
4943
a9098fd0
GK
4944 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4945 and we have put it in the TOC, we just need to make a TOC-relative
4946 reference to it. */
4947 if (TARGET_TOC
4948 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 4949 && constant_pool_expr_p (operands[1])
a9098fd0
GK
4950 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4951 get_pool_mode (operands[1])))
fb4d4348 4952 {
a9098fd0 4953 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 4954 }
a9098fd0
GK
4955 else if (mode == Pmode
4956 && CONSTANT_P (operands[1])
38886f37
AO
4957 && ((GET_CODE (operands[1]) != CONST_INT
4958 && ! easy_fp_constant (operands[1], mode))
4959 || (GET_CODE (operands[1]) == CONST_INT
4960 && num_insns_constant (operands[1], mode) > 2)
4961 || (GET_CODE (operands[0]) == REG
4962 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 4963 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
4964 && ! legitimate_constant_pool_address_p (operands[1])
4965 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
4966 {
4967 /* Emit a USE operation so that the constant isn't deleted if
4968 expensive optimizations are turned on because nobody
4969 references it. This should only be done for operands that
4970 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4971 This should not be done for operands that contain LABEL_REFs.
4972 For now, we just handle the obvious case. */
4973 if (GET_CODE (operands[1]) != LABEL_REF)
4974 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4975
c859cda6 4976#if TARGET_MACHO
ee890fe2 4977 /* Darwin uses a special PIC legitimizer. */
ab82a49f 4978 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 4979 {
ee890fe2
SS
4980 operands[1] =
4981 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
4982 operands[0]);
4983 if (operands[0] != operands[1])
4984 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
4985 return;
4986 }
c859cda6 4987#endif
ee890fe2 4988
fb4d4348
GK
4989 /* If we are to limit the number of things we put in the TOC and
4990 this is a symbol plus a constant we can add in one insn,
4991 just put the symbol in the TOC and add the constant. Don't do
4992 this if reload is in progress. */
4993 if (GET_CODE (operands[1]) == CONST
4994 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4995 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 4996 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
4997 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4998 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4999 && ! side_effects_p (operands[0]))
5000 {
a4f6c312
SS
5001 rtx sym =
5002 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
5003 rtx other = XEXP (XEXP (operands[1], 0), 1);
5004
a9098fd0
GK
5005 sym = force_reg (mode, sym);
5006 if (mode == SImode)
5007 emit_insn (gen_addsi3 (operands[0], sym, other));
5008 else
5009 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
5010 return;
5011 }
5012
a9098fd0 5013 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5014
f676971a 5015 if (TARGET_TOC
4d588c14 5016 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
5017 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
5018 get_pool_constant (XEXP (operands[1], 0)),
5019 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 5020 {
ba4828e0 5021 operands[1]
542a8afa 5022 = gen_const_mem (mode,
c4ad648e 5023 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 5024 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 5025 }
fb4d4348
GK
5026 }
5027 break;
a9098fd0 5028
fb4d4348 5029 case TImode:
76d2b81d
DJ
5030 rs6000_eliminate_indexed_memrefs (operands);
5031
27dc0551
DE
5032 if (TARGET_POWER)
5033 {
5034 emit_insn (gen_rtx_PARALLEL (VOIDmode,
5035 gen_rtvec (2,
5036 gen_rtx_SET (VOIDmode,
5037 operands[0], operands[1]),
5038 gen_rtx_CLOBBER (VOIDmode,
5039 gen_rtx_SCRATCH (SImode)))));
5040 return;
5041 }
fb4d4348
GK
5042 break;
5043
5044 default:
37409796 5045 gcc_unreachable ();
fb4d4348
GK
5046 }
5047
a9098fd0
GK
5048 /* Above, we may have called force_const_mem which may have returned
5049 an invalid address. If we can, fix this up; otherwise, reload will
5050 have to deal with it. */
8f4e6caf
RH
5051 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
5052 operands[1] = validize_mem (operands[1]);
a9098fd0 5053
8f4e6caf 5054 emit_set:
fb4d4348
GK
5055 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5056}
4697a36c 5057\f
2858f73a
GK
5058/* Nonzero if we can use a floating-point register to pass this arg. */
5059#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 5060 (SCALAR_FLOAT_MODE_P (MODE) \
2858f73a
GK
5061 && (CUM)->fregno <= FP_ARG_MAX_REG \
5062 && TARGET_HARD_FLOAT && TARGET_FPRS)
5063
5064/* Nonzero if we can use an AltiVec register to pass this arg. */
5065#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
5066 (ALTIVEC_VECTOR_MODE (MODE) \
5067 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
5068 && TARGET_ALTIVEC_ABI \
83953138 5069 && (NAMED))
2858f73a 5070
c6e8c921
GK
5071/* Return a nonzero value to say to return the function value in
5072 memory, just as large structures are always returned. TYPE will be
5073 the data type of the value, and FNTYPE will be the type of the
5074 function doing the returning, or @code{NULL} for libcalls.
5075
5076 The AIX ABI for the RS/6000 specifies that all structures are
5077 returned in memory. The Darwin ABI does the same. The SVR4 ABI
5078 specifies that structures <= 8 bytes are returned in r3/r4, but a
5079 draft put them in memory, and GCC used to implement the draft
df01da37 5080 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
5081 controls this instead of DEFAULT_ABI; V.4 targets needing backward
5082 compatibility can change DRAFT_V4_STRUCT_RET to override the
5083 default, and -m switches get the final word. See
5084 rs6000_override_options for more details.
5085
5086 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
5087 long double support is enabled. These values are returned in memory.
5088
5089 int_size_in_bytes returns -1 for variable size objects, which go in
5090 memory always. The cast to unsigned makes -1 > 8. */
5091
5092static bool
586de218 5093rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 5094{
594a51fe
SS
5095 /* In the darwin64 ABI, try to use registers for larger structs
5096 if possible. */
0b5383eb 5097 if (rs6000_darwin64_abi
594a51fe 5098 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
5099 && int_size_in_bytes (type) > 0)
5100 {
5101 CUMULATIVE_ARGS valcum;
5102 rtx valret;
5103
5104 valcum.words = 0;
5105 valcum.fregno = FP_ARG_MIN_REG;
5106 valcum.vregno = ALTIVEC_ARG_MIN_REG;
5107 /* Do a trial code generation as if this were going to be passed
5108 as an argument; if any part goes in memory, we return NULL. */
5109 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
5110 if (valret)
5111 return false;
5112 /* Otherwise fall through to more conventional ABI rules. */
5113 }
594a51fe 5114
c6e8c921 5115 if (AGGREGATE_TYPE_P (type)
df01da37 5116 && (aix_struct_return
c6e8c921
GK
5117 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
5118 return true;
b693336b 5119
bada2eb8
DE
5120 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5121 modes only exist for GCC vector types if -maltivec. */
5122 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
5123 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5124 return false;
5125
b693336b
PB
5126 /* Return synthetic vectors in memory. */
5127 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5128 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5129 {
5130 static bool warned_for_return_big_vectors = false;
5131 if (!warned_for_return_big_vectors)
5132 {
d4ee4d25 5133 warning (0, "GCC vector returned by reference: "
b693336b
PB
5134 "non-standard ABI extension with no compatibility guarantee");
5135 warned_for_return_big_vectors = true;
5136 }
5137 return true;
5138 }
5139
602ea4d3 5140 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 5141 return true;
ad630bef 5142
c6e8c921
GK
5143 return false;
5144}
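/* Illustrative consequences of the rules above (not exhaustive): with
   aix_struct_return set, even a struct containing a single int is
   returned in memory, whereas the SVR4 convention lets it come back in
   r3; a 32-byte synthetic vector type is returned in memory either way,
   with the one-time warning emitted above.  */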
5145
4697a36c
MM
5146/* Initialize a variable CUM of type CUMULATIVE_ARGS
5147 for a call to a function whose data type is FNTYPE.
5148 For a library call, FNTYPE is 0.
5149
5150 For incoming args we set the number of prototype arguments large
1c20ae99 5151 enough that we never return a PARALLEL. */
4697a36c
MM
5152
5153void
f676971a 5154init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
5155 rtx libname ATTRIBUTE_UNUSED, int incoming,
5156 int libcall, int n_named_args)
4697a36c
MM
5157{
5158 static CUMULATIVE_ARGS zero_cumulative;
5159
5160 *cum = zero_cumulative;
5161 cum->words = 0;
5162 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 5163 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 5164 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
5165 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5166 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 5167 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
5168 cum->stdarg = fntype
5169 && (TYPE_ARG_TYPES (fntype) != 0
5170 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5171 != void_type_node));
4697a36c 5172
0f6937fe
AM
5173 cum->nargs_prototype = 0;
5174 if (incoming || cum->prototype)
5175 cum->nargs_prototype = n_named_args;
4697a36c 5176
a5c76ee6 5177 /* Check for a longcall attribute. */
3eb4e360
AM
5178 if ((!fntype && rs6000_default_long_calls)
5179 || (fntype
5180 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5181 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5182 cum->call_cookie |= CALL_LONG;
6a4cee5f 5183
4697a36c
MM
5184 if (TARGET_DEBUG_ARG)
5185 {
5186 fprintf (stderr, "\ninit_cumulative_args:");
5187 if (fntype)
5188 {
5189 tree ret_type = TREE_TYPE (fntype);
5190 fprintf (stderr, " ret code = %s,",
5191 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5192 }
5193
6a4cee5f
MM
5194 if (cum->call_cookie & CALL_LONG)
5195 fprintf (stderr, " longcall,");
5196
4697a36c
MM
5197 fprintf (stderr, " proto = %d, nargs = %d\n",
5198 cum->prototype, cum->nargs_prototype);
5199 }
f676971a 5200
c4ad648e
AM
5201 if (fntype
5202 && !TARGET_ALTIVEC
5203 && TARGET_ALTIVEC_ABI
5204 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5205 {
c85ce869 5206 error ("cannot return value in vector register because"
c4ad648e 5207 " altivec instructions are disabled, use -maltivec"
c85ce869 5208 " to enable them");
c4ad648e 5209 }
4697a36c
MM
5210}
5211\f
fe984136
RH
5212/* Return true if TYPE must be passed on the stack and not in registers. */
5213
5214static bool
586de218 5215rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
5216{
5217 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5218 return must_pass_in_stack_var_size (mode, type);
5219 else
5220 return must_pass_in_stack_var_size_or_pad (mode, type);
5221}
5222
c229cba9
DE
5223/* If defined, a C expression which determines whether, and in which
5224 direction, to pad out an argument with extra space. The value
5225 should be of type `enum direction': either `upward' to pad above
5226 the argument, `downward' to pad below, or `none' to inhibit
5227 padding.
5228
5229 For the AIX ABI, structs are always stored left-shifted in their
5230 argument slot. */
5231
9ebbca7d 5232enum direction
586de218 5233function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 5234{
6e985040
AM
5235#ifndef AGGREGATE_PADDING_FIXED
5236#define AGGREGATE_PADDING_FIXED 0
5237#endif
5238#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5239#define AGGREGATES_PAD_UPWARD_ALWAYS 0
5240#endif
5241
5242 if (!AGGREGATE_PADDING_FIXED)
5243 {
5244 /* GCC used to pass structures of the same size as integer types as
5245 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 5246 i.e. structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
5247 passed padded downward, except that -mstrict-align further
5248 muddied the water in that multi-component structures of 2 and 4
5249 bytes in size were passed padded upward.
5250
5251 The following arranges for best compatibility with previous
5252 versions of gcc, but removes the -mstrict-align dependency. */
5253 if (BYTES_BIG_ENDIAN)
5254 {
5255 HOST_WIDE_INT size = 0;
5256
5257 if (mode == BLKmode)
5258 {
5259 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5260 size = int_size_in_bytes (type);
5261 }
5262 else
5263 size = GET_MODE_SIZE (mode);
5264
5265 if (size == 1 || size == 2 || size == 4)
5266 return downward;
5267 }
5268 return upward;
5269 }
5270
5271 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5272 {
5273 if (type != 0 && AGGREGATE_TYPE_P (type))
5274 return upward;
5275 }
c229cba9 5276
d3704c46
KH
5277 /* Fall back to the default. */
5278 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
5279}
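/* Worked examples of the !AGGREGATE_PADDING_FIXED rules above: on a
   big-endian target a 2-byte struct is padded downward, while a 3-byte
   struct misses the 1/2/4 size check and is padded upward, matching the
   historical behaviour described in the comment.  */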
5280
b6c9286a 5281/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 5282 of an argument with the specified mode and type. If it is not defined,
b6c9286a 5283 PARM_BOUNDARY is used for all arguments.
f676971a 5284
84e9ad15
AM
5285 V.4 wants long longs and doubles to be double word aligned. Just
5286 testing the mode size is a boneheaded way to do this as it means
5287 that other types such as complex int are also double word aligned.
5288 However, we're stuck with this because changing the ABI might break
5289 existing library interfaces.
5290
b693336b
PB
5291 Doubleword align SPE vectors.
5292 Quadword align Altivec vectors.
5293 Quadword align large synthetic vector types. */
b6c9286a
MM
5294
5295int
b693336b 5296function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5297{
84e9ad15
AM
5298 if (DEFAULT_ABI == ABI_V4
5299 && (GET_MODE_SIZE (mode) == 8
5300 || (TARGET_HARD_FLOAT
5301 && TARGET_FPRS
7393f7f8 5302 && (mode == TFmode || mode == TDmode))))
4ed78545 5303 return 64;
ad630bef
DE
5304 else if (SPE_VECTOR_MODE (mode)
5305 || (type && TREE_CODE (type) == VECTOR_TYPE
5306 && int_size_in_bytes (type) >= 8
5307 && int_size_in_bytes (type) < 16))
e1f83b4d 5308 return 64;
ad630bef
DE
5309 else if (ALTIVEC_VECTOR_MODE (mode)
5310 || (type && TREE_CODE (type) == VECTOR_TYPE
5311 && int_size_in_bytes (type) >= 16))
0ac081f6 5312 return 128;
0b5383eb
DJ
5313 else if (rs6000_darwin64_abi && mode == BLKmode
5314 && type && TYPE_ALIGN (type) > 64)
5315 return 128;
9ebbca7d 5316 else
b6c9286a 5317 return PARM_BOUNDARY;
b6c9286a 5318}
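/* Illustrative cases: a DImode long long under the V.4 ABI gets 64-bit
   alignment, an AltiVec V4SImode vector gets 128-bit alignment, and a
   plain SImode int falls through to PARM_BOUNDARY.  */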
c53bdcf5 5319
294bd182
AM
5320/* For a function parm of MODE and TYPE, return the starting word in
5321 the parameter area. NWORDS of the parameter area are already used. */
5322
5323static unsigned int
5324rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5325{
5326 unsigned int align;
5327 unsigned int parm_offset;
5328
5329 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5330 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5331 return nwords + (-(parm_offset + nwords) & align);
5332}
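/* Worked example of the expression above (illustrative; assumes 32-bit
   AIX, where parm_offset is 6 words and PARM_BOUNDARY is 32 bits): a
   quadword-aligned arg has align == 128/32 - 1 == 3, so with nwords == 3
   already used the result is 3 + (-(6 + 3) & 3) == 6, placing the arg at
   parameter word 6, which is 16-byte aligned once the 24-byte save-area
   offset is added in.  */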
5333
c53bdcf5
AM
5334/* Compute the size (in words) of a function argument. */
5335
5336static unsigned long
5337rs6000_arg_size (enum machine_mode mode, tree type)
5338{
5339 unsigned long size;
5340
5341 if (mode != BLKmode)
5342 size = GET_MODE_SIZE (mode);
5343 else
5344 size = int_size_in_bytes (type);
5345
5346 if (TARGET_32BIT)
5347 return (size + 3) >> 2;
5348 else
5349 return (size + 7) >> 3;
5350}
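/* For instance: a 10-byte BLKmode struct occupies (10 + 3) >> 2 == 3
   words under -m32 but (10 + 7) >> 3 == 2 words under -m64, while a
   DFmode double takes 2 words and 1 word respectively.  */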
b6c9286a 5351\f
0b5383eb 5352/* Use this to flush pending int fields. */
594a51fe
SS
5353
5354static void
0b5383eb
DJ
5355rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5356 HOST_WIDE_INT bitpos)
594a51fe 5357{
0b5383eb
DJ
5358 unsigned int startbit, endbit;
5359 int intregs, intoffset;
5360 enum machine_mode mode;
594a51fe 5361
0b5383eb
DJ
5362 if (cum->intoffset == -1)
5363 return;
594a51fe 5364
0b5383eb
DJ
5365 intoffset = cum->intoffset;
5366 cum->intoffset = -1;
5367
5368 if (intoffset % BITS_PER_WORD != 0)
5369 {
5370 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5371 MODE_INT, 0);
5372 if (mode == BLKmode)
594a51fe 5373 {
0b5383eb
DJ
5374 /* We couldn't find an appropriate mode, which happens,
5375 e.g., in packed structs when there are 3 bytes to load.
5376 Move intoffset back to the beginning of the word in this
5377 case. */
5378 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5379 }
594a51fe 5380 }
0b5383eb
DJ
5381
5382 startbit = intoffset & -BITS_PER_WORD;
5383 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5384 intregs = (endbit - startbit) / BITS_PER_WORD;
5385 cum->words += intregs;
5386}
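/* Example of the flush arithmetic (illustrative, 64-bit words): if
   integer data began at bit 8 and the next non-integer field starts at
   bit 96, then startbit == 0, endbit == 128 and intregs == 2, so two GP
   argument words are consumed even though only parts of them hold data.  */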
5387
5388/* The darwin64 ABI calls for us to recurse down through structs,
5389 looking for elements passed in registers. Unfortunately, we have
5390 to track int register count here also because of misalignments
5391 in powerpc alignment mode. */
5392
5393static void
5394rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5395 tree type,
5396 HOST_WIDE_INT startbitpos)
5397{
5398 tree f;
5399
5400 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5401 if (TREE_CODE (f) == FIELD_DECL)
5402 {
5403 HOST_WIDE_INT bitpos = startbitpos;
5404 tree ftype = TREE_TYPE (f);
70fb00df
AP
5405 enum machine_mode mode;
5406 if (ftype == error_mark_node)
5407 continue;
5408 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5409
5410 if (DECL_SIZE (f) != 0
5411 && host_integerp (bit_position (f), 1))
5412 bitpos += int_bit_position (f);
5413
5414 /* ??? FIXME: else assume zero offset. */
5415
5416 if (TREE_CODE (ftype) == RECORD_TYPE)
5417 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5418 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5419 {
5420 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5421 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5422 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5423 }
5424 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5425 {
5426 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5427 cum->vregno++;
5428 cum->words += 2;
5429 }
5430 else if (cum->intoffset == -1)
5431 cum->intoffset = bitpos;
5432 }
594a51fe
SS
5433}
5434
4697a36c
MM
5435/* Update the data in CUM to advance over an argument
5436 of mode MODE and data type TYPE.
b2d04ecf
AM
5437 (TYPE is null for libcalls where that information may not be available.)
5438
5439 Note that for args passed by reference, function_arg will be called
5440 with MODE and TYPE set to that of the pointer to the arg, not the arg
5441 itself. */
4697a36c
MM
5442
5443void
f676971a 5444function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5445 tree type, int named, int depth)
4697a36c 5446{
0b5383eb
DJ
5447 int size;
5448
594a51fe
SS
5449 /* Only tick off an argument if we're not recursing. */
5450 if (depth == 0)
5451 cum->nargs_prototype--;
4697a36c 5452
ad630bef
DE
5453 if (TARGET_ALTIVEC_ABI
5454 && (ALTIVEC_VECTOR_MODE (mode)
5455 || (type && TREE_CODE (type) == VECTOR_TYPE
5456 && int_size_in_bytes (type) == 16)))
0ac081f6 5457 {
4ed78545
AM
5458 bool stack = false;
5459
2858f73a 5460 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5461 {
6d0ef01e
HP
5462 cum->vregno++;
5463 if (!TARGET_ALTIVEC)
c85ce869 5464 error ("cannot pass argument in vector register because"
6d0ef01e 5465 " altivec instructions are disabled, use -maltivec"
c85ce869 5466 " to enable them");
4ed78545
AM
5467
5468 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5469 even if it is going to be passed in a vector register.
4ed78545
AM
5470 Darwin does the same for variable-argument functions. */
5471 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5472 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5473 stack = true;
6d0ef01e 5474 }
4ed78545
AM
5475 else
5476 stack = true;
5477
5478 if (stack)
c4ad648e 5479 {
a594a19c 5480 int align;
f676971a 5481
2858f73a
GK
5482 /* Vector parameters must be 16-byte aligned. This places
5483 them at 2 mod 4 in terms of words in 32-bit mode, since
5484 the parameter save area starts at offset 24 from the
5485 stack. In 64-bit mode, they just have to start on an
5486 even word, since the parameter save area is 16-byte
5487 aligned. Space for GPRs is reserved even if the argument
5488 will be passed in memory. */
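	  /* Concretely (illustrative): in 32-bit mode with cum->words == 5,
	     align == (2 - 5) & 3 == 1, placing the vector at word 6, which
	     is 2 mod 4 and hence 16-byte aligned once the 24-byte offset of
	     the save area is added.  */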
5489 if (TARGET_32BIT)
4ed78545 5490 align = (2 - cum->words) & 3;
2858f73a
GK
5491 else
5492 align = cum->words & 1;
c53bdcf5 5493 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5494
a594a19c
GK
5495 if (TARGET_DEBUG_ARG)
5496 {
f676971a 5497 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5498 cum->words, align);
5499 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5500 cum->nargs_prototype, cum->prototype,
2858f73a 5501 GET_MODE_NAME (mode));
a594a19c
GK
5502 }
5503 }
0ac081f6 5504 }
a4b0320c 5505 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5506 && !cum->stdarg
5507 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5508 cum->sysv_gregno++;
594a51fe
SS
5509
5510 else if (rs6000_darwin64_abi
5511 && mode == BLKmode
0b5383eb
DJ
5512 && TREE_CODE (type) == RECORD_TYPE
5513 && (size = int_size_in_bytes (type)) > 0)
5514 {
5515 /* Variable sized types have size == -1 and are
5516 treated as if consisting entirely of ints.
5517 Pad to 16 byte boundary if needed. */
5518 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5519 && (cum->words % 2) != 0)
5520 cum->words++;
5521 /* For varargs, we can just go up by the size of the struct. */
5522 if (!named)
5523 cum->words += (size + 7) / 8;
5524 else
5525 {
5526 /* It is tempting to say int register count just goes up by
5527 sizeof(type)/8, but this is wrong in a case such as
5528 { int; double; int; } [powerpc alignment]. We have to
5529 grovel through the fields for these too. */
5530 cum->intoffset = 0;
5531 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5532 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5533 size * BITS_PER_UNIT);
5534 }
5535 }
f607bc57 5536 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5537 {
a3170dc6 5538 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5539 && (mode == SFmode || mode == DFmode
e41b2a33 5540 || mode == SDmode || mode == DDmode || mode == TDmode
602ea4d3 5541 || (mode == TFmode && !TARGET_IEEEQUAD)))
4697a36c 5542 {
2d83f070
JJ
5543 /* _Decimal128 must use an even/odd register pair. This assumes
5544 that the register number is odd when fregno is odd. */
5545 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5546 cum->fregno++;
5547
5548 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5549 <= FP_ARG_V4_MAX_REG)
602ea4d3 5550 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5551 else
5552 {
602ea4d3 5553 cum->fregno = FP_ARG_V4_MAX_REG + 1;
4d4447b5
PB
5554 if (mode == DFmode || mode == TFmode
5555 || mode == DDmode || mode == TDmode)
c4ad648e 5556 cum->words += cum->words & 1;
c53bdcf5 5557 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5558 }
4697a36c 5559 }
4cc833b7
RH
5560 else
5561 {
b2d04ecf 5562 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5563 int gregno = cum->sysv_gregno;
5564
4ed78545
AM
5565 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5566 (r7,r8) or (r9,r10). As does any other 2 word item such
5567 as complex int due to a historical mistake. */
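	  /* The (1 - gregno) & 1 term below bumps an even register number up
	     to the next odd one (e.g. r4 to r5) and leaves odd ones alone,
	     which produces exactly the (r3,r4)/(r5,r6)/(r7,r8)/(r9,r10)
	     pairing described above.  */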
5568 if (n_words == 2)
5569 gregno += (1 - gregno) & 1;
4cc833b7 5570
4ed78545 5571 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5572 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5573 {
4ed78545
AM
5574 /* Long long and SPE vectors are aligned on the stack.
5575 So are other 2 word items such as complex int due to
5576 a historical mistake. */
4cc833b7
RH
5577 if (n_words == 2)
5578 cum->words += cum->words & 1;
5579 cum->words += n_words;
5580 }
4697a36c 5581
4cc833b7
RH
5582 /* Note: continuing to accumulate gregno past when we've started
5583 spilling to the stack indicates the fact that we've started
5584 spilling to the stack to expand_builtin_saveregs. */
5585 cum->sysv_gregno = gregno + n_words;
5586 }
4697a36c 5587
4cc833b7
RH
5588 if (TARGET_DEBUG_ARG)
5589 {
5590 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5591 cum->words, cum->fregno);
5592 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5593 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5594 fprintf (stderr, "mode = %4s, named = %d\n",
5595 GET_MODE_NAME (mode), named);
5596 }
4697a36c
MM
5597 }
5598 else
4cc833b7 5599 {
b2d04ecf 5600 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5601 int start_words = cum->words;
5602 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5603
294bd182 5604 cum->words = align_words + n_words;
4697a36c 5605
ebb109ad 5606 if (SCALAR_FLOAT_MODE_P (mode)
a3170dc6 5607 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5608 {
5609 /* _Decimal128 must be passed in an even/odd float register pair.
5610 This assumes that the register number is odd when fregno is
5611 odd. */
5612 if (mode == TDmode && (cum->fregno % 2) == 1)
5613 cum->fregno++;
5614 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5615 }
4cc833b7
RH
5616
5617 if (TARGET_DEBUG_ARG)
5618 {
5619 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5620 cum->words, cum->fregno);
5621 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5622 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5623 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5624 named, align_words - start_words, depth);
4cc833b7
RH
5625 }
5626 }
4697a36c 5627}
a6c9bed4 5628
f82f556d
AH
5629static rtx
5630spe_build_register_parallel (enum machine_mode mode, int gregno)
5631{
17caeff2 5632 rtx r1, r3, r5, r7;
f82f556d 5633
37409796 5634 switch (mode)
f82f556d 5635 {
37409796 5636 case DFmode:
4d4447b5 5637 case DDmode:
54b695e7
AH
5638 r1 = gen_rtx_REG (DImode, gregno);
5639 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5640 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5641
5642 case DCmode:
17caeff2 5643 case TFmode:
4d4447b5 5644 case TDmode:
54b695e7
AH
5645 r1 = gen_rtx_REG (DImode, gregno);
5646 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5647 r3 = gen_rtx_REG (DImode, gregno + 2);
5648 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5649 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5650
17caeff2
JM
5651 case TCmode:
5652 r1 = gen_rtx_REG (DImode, gregno);
5653 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5654 r3 = gen_rtx_REG (DImode, gregno + 2);
5655 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5656 r5 = gen_rtx_REG (DImode, gregno + 4);
5657 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5658 r7 = gen_rtx_REG (DImode, gregno + 6);
5659 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5660 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5661
37409796
NS
5662 default:
5663 gcc_unreachable ();
f82f556d 5664 }
f82f556d 5665}
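/* For illustration: a DCmode arg with gregno == 5 is described by a
   PARALLEL of (reg:DI 5) at byte offset 0 and (reg:DI 7) at byte offset
   8, i.e. two 64-bit GPR chunks covering the 16-byte complex double.  */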
b78d48dd 5666
f82f556d 5667/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5668static rtx
f676971a 5669rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5670 tree type)
a6c9bed4 5671{
f82f556d
AH
5672 int gregno = cum->sysv_gregno;
5673
5674 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5675 doubles are passed and returned in a pair of GPRs for ABI compatibility. */
4d4447b5
PB
5676 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
5677 || mode == DDmode || mode == TDmode
5678 || mode == DCmode || mode == TCmode))
f82f556d 5679 {
b5870bee
AH
5680 int n_words = rs6000_arg_size (mode, type);
5681
f82f556d 5682 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4d4447b5 5683 if (mode == DFmode || mode == DDmode)
b5870bee 5684 gregno += (1 - gregno) & 1;
f82f556d 5685
b5870bee
AH
5686 /* Multi-reg args are not split between registers and stack. */
5687 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5688 return NULL_RTX;
5689
5690 return spe_build_register_parallel (mode, gregno);
5691 }
a6c9bed4
AH
5692 if (cum->stdarg)
5693 {
c53bdcf5 5694 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5695
5696 /* SPE vectors are put in odd registers. */
5697 if (n_words == 2 && (gregno & 1) == 0)
5698 gregno += 1;
5699
5700 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5701 {
5702 rtx r1, r2;
5703 enum machine_mode m = SImode;
5704
5705 r1 = gen_rtx_REG (m, gregno);
5706 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5707 r2 = gen_rtx_REG (m, gregno + 1);
5708 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5709 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5710 }
5711 else
b78d48dd 5712 return NULL_RTX;
a6c9bed4
AH
5713 }
5714 else
5715 {
f82f556d
AH
5716 if (gregno <= GP_ARG_MAX_REG)
5717 return gen_rtx_REG (mode, gregno);
a6c9bed4 5718 else
b78d48dd 5719 return NULL_RTX;
a6c9bed4
AH
5720 }
5721}
5722
0b5383eb
DJ
5723/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5724 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5725
0b5383eb 5726static void
bb8df8a6 5727rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5728 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5729{
0b5383eb
DJ
5730 enum machine_mode mode;
5731 unsigned int regno;
5732 unsigned int startbit, endbit;
5733 int this_regno, intregs, intoffset;
5734 rtx reg;
594a51fe 5735
0b5383eb
DJ
5736 if (cum->intoffset == -1)
5737 return;
5738
5739 intoffset = cum->intoffset;
5740 cum->intoffset = -1;
5741
5742 /* If this is the trailing part of a word, try to only load that
5743 much into the register. Otherwise load the whole register. Note
5744 that in the latter case we may pick up unwanted bits. It's not a
5745 problem at the moment, but we may wish to revisit it. */
5746
5747 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5748 {
0b5383eb
DJ
5749 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5750 MODE_INT, 0);
5751 if (mode == BLKmode)
5752 {
5753 /* We couldn't find an appropriate mode, which happens,
5754 e.g., in packed structs when there are 3 bytes to load.
5755 Move intoffset back to the beginning of the word in this
5756 case. */
5757 intoffset = intoffset & -BITS_PER_WORD;
5758 mode = word_mode;
5759 }
5760 }
5761 else
5762 mode = word_mode;
5763
5764 startbit = intoffset & -BITS_PER_WORD;
5765 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5766 intregs = (endbit - startbit) / BITS_PER_WORD;
5767 this_regno = cum->words + intoffset / BITS_PER_WORD;
5768
5769 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
5770 cum->use_stack = 1;
bb8df8a6 5771
0b5383eb
DJ
5772 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
5773 if (intregs <= 0)
5774 return;
5775
5776 intoffset /= BITS_PER_UNIT;
5777 do
5778 {
5779 regno = GP_ARG_MIN_REG + this_regno;
5780 reg = gen_rtx_REG (mode, regno);
5781 rvec[(*k)++] =
5782 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5783
5784 this_regno += 1;
5785 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
5786 mode = word_mode;
5787 intregs -= 1;
5788 }
5789 while (intregs > 0);
5790}
5791
5792/* Recursive workhorse for the following. */
5793
5794static void
586de218 5795rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
5796 HOST_WIDE_INT startbitpos, rtx rvec[],
5797 int *k)
5798{
5799 tree f;
5800
5801 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5802 if (TREE_CODE (f) == FIELD_DECL)
5803 {
5804 HOST_WIDE_INT bitpos = startbitpos;
5805 tree ftype = TREE_TYPE (f);
70fb00df
AP
5806 enum machine_mode mode;
5807 if (ftype == error_mark_node)
5808 continue;
5809 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5810
5811 if (DECL_SIZE (f) != 0
5812 && host_integerp (bit_position (f), 1))
5813 bitpos += int_bit_position (f);
5814
5815 /* ??? FIXME: else assume zero offset. */
5816
5817 if (TREE_CODE (ftype) == RECORD_TYPE)
5818 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5819 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 5820 {
0b5383eb
DJ
5821#if 0
5822 switch (mode)
594a51fe 5823 {
0b5383eb
DJ
5824 case SCmode: mode = SFmode; break;
5825 case DCmode: mode = DFmode; break;
5826 case TCmode: mode = TFmode; break;
5827 default: break;
594a51fe 5828 }
0b5383eb
DJ
5829#endif
5830 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5831 rvec[(*k)++]
bb8df8a6 5832 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
5833 gen_rtx_REG (mode, cum->fregno++),
5834 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 5835 if (mode == TFmode || mode == TDmode)
0b5383eb 5836 cum->fregno++;
594a51fe 5837 }
0b5383eb
DJ
5838 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5839 {
5840 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5841 rvec[(*k)++]
bb8df8a6
EC
5842 = gen_rtx_EXPR_LIST (VOIDmode,
5843 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
5844 GEN_INT (bitpos / BITS_PER_UNIT));
5845 }
5846 else if (cum->intoffset == -1)
5847 cum->intoffset = bitpos;
5848 }
5849}
594a51fe 5850
0b5383eb
DJ
5851/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5852 the register(s) to be used for each field and subfield of a struct
5853 being passed by value, along with the offset of where the
5854 register's value may be found in the block. FP fields go in FP
5855 registers, vector fields go in vector registers, and everything
bb8df8a6 5856 else goes in int registers, packed as in memory.
8ff40a74 5857
0b5383eb
DJ
5858 This code is also used for function return values. RETVAL indicates
5859 whether this is the case.
8ff40a74 5860
a4d05547 5861 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 5862 calling convention. */
594a51fe 5863
0b5383eb 5864static rtx
586de218 5865rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
5866 int named, bool retval)
5867{
5868 rtx rvec[FIRST_PSEUDO_REGISTER];
5869 int k = 1, kbase = 1;
5870 HOST_WIDE_INT typesize = int_size_in_bytes (type);
5871 /* This is a copy; modifications are not visible to our caller. */
5872 CUMULATIVE_ARGS copy_cum = *orig_cum;
5873 CUMULATIVE_ARGS *cum = &copy_cum;
5874
5875 /* Pad to 16 byte boundary if needed. */
5876 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5877 && (cum->words % 2) != 0)
5878 cum->words++;
5879
5880 cum->intoffset = 0;
5881 cum->use_stack = 0;
5882 cum->named = named;
5883
5884 /* Put entries into rvec[] for individual FP and vector fields, and
5885 for the chunks of memory that go in int regs. Note we start at
5886 element 1; 0 is reserved for an indication of using memory, and
5887 may or may not be filled in below. */
5888 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
5889 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
5890
5891 /* If any part of the struct went on the stack put all of it there.
5892 This hack is because the generic code for
5893 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
5894 parts of the struct are not at the beginning. */
5895 if (cum->use_stack)
5896 {
5897 if (retval)
5898 return NULL_RTX; /* doesn't go in registers at all */
5899 kbase = 0;
5900 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5901 }
5902 if (k > 1 || cum->use_stack)
5903 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
5904 else
5905 return NULL_RTX;
5906}
5907
b78d48dd
FJ
5908/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
5909
5910static rtx
ec6376ab 5911rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 5912{
ec6376ab
AM
5913 int n_units;
5914 int i, k;
5915 rtx rvec[GP_ARG_NUM_REG + 1];
5916
5917 if (align_words >= GP_ARG_NUM_REG)
5918 return NULL_RTX;
5919
5920 n_units = rs6000_arg_size (mode, type);
5921
5922 /* Optimize the simple case where the arg fits in one gpr, except in
5923 the case of BLKmode due to assign_parms assuming that registers are
5924 BITS_PER_WORD wide. */
5925 if (n_units == 0
5926 || (n_units == 1 && mode != BLKmode))
5927 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5928
5929 k = 0;
5930 if (align_words + n_units > GP_ARG_NUM_REG)
5931 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5932 using a magic NULL_RTX component.
79773478
AM
5933 This is not strictly correct. Only some of the arg belongs in
5934 memory, not all of it. However, the normal scheme using
5935 function_arg_partial_nregs can result in unusual subregs, e.g.
5936 (subreg:SI (reg:DF) 4), which are not handled well. The code to
5937 store the whole arg to memory is often more efficient than code
5938 to store pieces, and we know that space is available in the right
5939 place for the whole arg. */
ec6376ab
AM
5940 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5941
5942 i = 0;
5943 do
36a454e1 5944 {
ec6376ab
AM
5945 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5946 rtx off = GEN_INT (i++ * 4);
5947 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 5948 }
ec6376ab
AM
5949 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5950
5951 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
5952}
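/* Boundary-case sketch (illustrative; assumes GP_ARG_MIN_REG == 3 and
   GP_ARG_NUM_REG == 8): a DImode arg with align_words == 7 has
   n_units == 2, so the PARALLEL gets the magic NULL_RTX element for the
   part passed in memory followed by (reg:SI 10) at offset 0, i.e. the
   first word of the arg travels in r10 and the second on the stack.  */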
5953
4697a36c
MM
5954/* Determine where to put an argument to a function.
5955 Value is zero to push the argument on the stack,
5956 or a hard register in which to store the argument.
5957
5958 MODE is the argument's machine mode.
5959 TYPE is the data type of the argument (as a tree).
5960 This is null for libcalls where that information may
5961 not be available.
5962 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
5963 the preceding args and about the function being called. It is
5964 not modified in this routine.
4697a36c
MM
5965 NAMED is nonzero if this argument is a named parameter
5966 (otherwise it is an extra parameter matching an ellipsis).
5967
5968 On RS/6000 the first eight words of non-FP are normally in registers
5969 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
5970 Under V.4, the first 8 FP args are in registers.
5971
5972 If this is floating-point and no prototype is specified, we use
5973 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 5974 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 5975 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
5976 doesn't support PARALLEL anyway.
5977
5978 Note that for args passed by reference, function_arg will be called
5979 with MODE and TYPE set to that of the pointer to the arg, not the arg
5980 itself. */
4697a36c 5981
9390387d 5982rtx
f676971a 5983function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5984 tree type, int named)
4697a36c 5985{
4cc833b7 5986 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 5987
a4f6c312
SS
5988 /* Return a marker to indicate whether CR1 needs to set or clear the
5989 bit that V.4 uses to say fp args were passed in registers.
5990 Assume that we don't need the marker for software floating point,
5991 or compiler generated library calls. */
4697a36c
MM
5992 if (mode == VOIDmode)
5993 {
f607bc57 5994 if (abi == ABI_V4
b9599e46 5995 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
5996 && (cum->stdarg
5997 || (cum->nargs_prototype < 0
5998 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 5999 {
a3170dc6
AH
6000 /* For the SPE, we need to crxor CR6 always. */
6001 if (TARGET_SPE_ABI)
6002 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
6003 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
6004 return GEN_INT (cum->call_cookie
6005 | ((cum->fregno == FP_ARG_MIN_REG)
6006 ? CALL_V4_SET_FP_ARGS
6007 : CALL_V4_CLEAR_FP_ARGS));
7509c759 6008 }
4697a36c 6009
7509c759 6010 return GEN_INT (cum->call_cookie);
4697a36c
MM
6011 }
6012
0b5383eb
DJ
6013 if (rs6000_darwin64_abi && mode == BLKmode
6014 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 6015 {
0b5383eb 6016 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
6017 if (rslt != NULL_RTX)
6018 return rslt;
6019 /* Else fall through to usual handling. */
6020 }
6021
2858f73a 6022 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
6023 if (TARGET_64BIT && ! cum->prototype)
6024 {
c4ad648e
AM
6025 /* Vector parameters get passed in vector register
6026 and also in GPRs or memory, in absence of prototype. */
6027 int align_words;
6028 rtx slot;
6029 align_words = (cum->words + 1) & ~1;
6030
6031 if (align_words >= GP_ARG_NUM_REG)
6032 {
6033 slot = NULL_RTX;
6034 }
6035 else
6036 {
6037 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6038 }
6039 return gen_rtx_PARALLEL (mode,
6040 gen_rtvec (2,
6041 gen_rtx_EXPR_LIST (VOIDmode,
6042 slot, const0_rtx),
6043 gen_rtx_EXPR_LIST (VOIDmode,
6044 gen_rtx_REG (mode, cum->vregno),
6045 const0_rtx)));
c72d6c26
HP
6046 }
6047 else
6048 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
6049 else if (TARGET_ALTIVEC_ABI
6050 && (ALTIVEC_VECTOR_MODE (mode)
6051 || (type && TREE_CODE (type) == VECTOR_TYPE
6052 && int_size_in_bytes (type) == 16)))
0ac081f6 6053 {
2858f73a 6054 if (named || abi == ABI_V4)
a594a19c 6055 return NULL_RTX;
0ac081f6 6056 else
a594a19c
GK
6057 {
6058 /* Vector parameters to varargs functions under AIX or Darwin
6059 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
6060 int align, align_words, n_words;
6061 enum machine_mode part_mode;
a594a19c
GK
6062
6063 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
6064 2 mod 4 in terms of words in 32-bit mode, since the parameter
6065 save area starts at offset 24 from the stack. In 64-bit mode,
6066 they just have to start on an even word, since the parameter
6067 save area is 16-byte aligned. */
6068 if (TARGET_32BIT)
4ed78545 6069 align = (2 - cum->words) & 3;
2858f73a
GK
6070 else
6071 align = cum->words & 1;
a594a19c
GK
6072 align_words = cum->words + align;
6073
6074 /* Out of registers? Memory, then. */
6075 if (align_words >= GP_ARG_NUM_REG)
6076 return NULL_RTX;
ec6376ab
AM
6077
6078 if (TARGET_32BIT && TARGET_POWERPC64)
6079 return rs6000_mixed_function_arg (mode, type, align_words);
6080
2858f73a
GK
6081 /* The vector value goes in GPRs. Only the part of the
6082 value in GPRs is reported here. */
ec6376ab
AM
6083 part_mode = mode;
6084 n_words = rs6000_arg_size (mode, type);
6085 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 6086 /* Fortunately, there are only two possibilities: the value
2858f73a
GK
6087 is either wholly in GPRs or half in GPRs and half not. */
6088 part_mode = DImode;
ec6376ab
AM
6089
6090 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 6091 }
0ac081f6 6092 }
f82f556d
AH
6093 else if (TARGET_SPE_ABI && TARGET_SPE
6094 && (SPE_VECTOR_MODE (mode)
18f63bfa 6095 || (TARGET_E500_DOUBLE && (mode == DFmode
7393f7f8 6096 || mode == DDmode
17caeff2
JM
6097 || mode == DCmode
6098 || mode == TFmode
7393f7f8 6099 || mode == TDmode
17caeff2 6100 || mode == TCmode))))
a6c9bed4 6101 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 6102
f607bc57 6103 else if (abi == ABI_V4)
4697a36c 6104 {
a3170dc6 6105 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 6106 && (mode == SFmode || mode == DFmode
7393f7f8 6107 || (mode == TFmode && !TARGET_IEEEQUAD)
e41b2a33 6108 || mode == SDmode || mode == DDmode || mode == TDmode))
4cc833b7 6109 {
2d83f070
JJ
6110 /* _Decimal128 must use an even/odd register pair. This assumes
6111 that the register number is odd when fregno is odd. */
6112 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
6113 cum->fregno++;
6114
6115 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
6116 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
6117 return gen_rtx_REG (mode, cum->fregno);
6118 else
b78d48dd 6119 return NULL_RTX;
4cc833b7
RH
6120 }
6121 else
6122 {
b2d04ecf 6123 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
6124 int gregno = cum->sysv_gregno;
6125
4ed78545
AM
6126 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
6127 (r7,r8) or (r9,r10). As does any other 2 word item such
6128 as complex int due to a historical mistake. */
6129 if (n_words == 2)
6130 gregno += (1 - gregno) & 1;
4cc833b7 6131
4ed78545 6132 /* Multi-reg args are not split between registers and stack. */
ec6376ab 6133 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 6134 return NULL_RTX;
ec6376ab
AM
6135
6136 if (TARGET_32BIT && TARGET_POWERPC64)
6137 return rs6000_mixed_function_arg (mode, type,
6138 gregno - GP_ARG_MIN_REG);
6139 return gen_rtx_REG (mode, gregno);
4cc833b7 6140 }
4697a36c 6141 }
4cc833b7
RH
6142 else
6143 {
294bd182 6144 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 6145
2d83f070
JJ
6146 /* _Decimal128 must be passed in an even/odd float register pair.
6147 This assumes that the register number is odd when fregno is odd. */
6148 if (mode == TDmode && (cum->fregno % 2) == 1)
6149 cum->fregno++;
6150
2858f73a 6151 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 6152 {
ec6376ab
AM
6153 rtx rvec[GP_ARG_NUM_REG + 1];
6154 rtx r;
6155 int k;
c53bdcf5
AM
6156 bool needs_psave;
6157 enum machine_mode fmode = mode;
c53bdcf5
AM
6158 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6159
6160 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6161 {
c53bdcf5
AM
6162 /* Currently, we only ever need one reg here because complex
6163 doubles are split. */
7393f7f8
BE
6164 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6165 && (fmode == TFmode || fmode == TDmode));
ec6376ab 6166
7393f7f8
BE
6167 /* Long double or _Decimal128 split over regs and memory. */
6168 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 6169 }
c53bdcf5
AM
6170
6171 /* Do we also need to pass this arg in the parameter save
6172 area? */
6173 needs_psave = (type
6174 && (cum->nargs_prototype <= 0
6175 || (DEFAULT_ABI == ABI_AIX
de17c25f 6176 && TARGET_XL_COMPAT
c53bdcf5
AM
6177 && align_words >= GP_ARG_NUM_REG)));
6178
6179 if (!needs_psave && mode == fmode)
ec6376ab 6180 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 6181
ec6376ab 6182 k = 0;
c53bdcf5
AM
6183 if (needs_psave)
6184 {
ec6376ab 6185 /* Describe the part that goes in gprs or the stack.
c53bdcf5 6186 This piece must come first, before the fprs. */
c53bdcf5
AM
6187 if (align_words < GP_ARG_NUM_REG)
6188 {
6189 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
6190
6191 if (align_words + n_words > GP_ARG_NUM_REG
6192 || (TARGET_32BIT && TARGET_POWERPC64))
6193 {
6194 /* If this is partially on the stack, then we only
6195 include the portion actually in registers here. */
6196 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6197 rtx off;
79773478
AM
6198 int i = 0;
6199 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
6200 /* Not all of the arg fits in gprs. Say that it
6201 goes in memory too, using a magic NULL_RTX
6202 component. Also see comment in
6203 rs6000_mixed_function_arg for why the normal
6204 function_arg_partial_nregs scheme doesn't work
6205 in this case. */
6206 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6207 const0_rtx);
ec6376ab
AM
6208 do
6209 {
6210 r = gen_rtx_REG (rmode,
6211 GP_ARG_MIN_REG + align_words);
2e6c9641 6212 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
6213 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6214 }
6215 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6216 }
6217 else
6218 {
6219 /* The whole arg fits in gprs. */
6220 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6221 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6222 }
c53bdcf5 6223 }
ec6376ab
AM
6224 else
6225 /* It's entirely in memory. */
6226 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
6227 }
6228
ec6376ab
AM
6229 /* Describe where this piece goes in the fprs. */
6230 r = gen_rtx_REG (fmode, cum->fregno);
6231 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6232
6233 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
6234 }
6235 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 6236 {
ec6376ab
AM
6237 if (TARGET_32BIT && TARGET_POWERPC64)
6238 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 6239
4eeca74f
AM
6240 if (mode == BLKmode)
6241 mode = Pmode;
6242
b2d04ecf
AM
6243 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6244 }
4cc833b7
RH
6245 else
6246 return NULL_RTX;
4697a36c 6247 }
4697a36c
MM
6248}
6249\f
ec6376ab 6250/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
6251 the number of bytes passed in registers. For args passed entirely in
6252 registers or entirely in memory, zero. When an arg is described by a
6253 PARALLEL, perhaps using more than one register type, this function
6254 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 6255
78a52f11
RH
6256static int
6257rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6258 tree type, bool named)
4697a36c 6259{
c53bdcf5 6260 int ret = 0;
ec6376ab 6261 int align_words;
c53bdcf5 6262
f607bc57 6263 if (DEFAULT_ABI == ABI_V4)
4697a36c 6264 return 0;
4697a36c 6265
c53bdcf5
AM
6266 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6267 && cum->nargs_prototype >= 0)
6268 return 0;
6269
0b5383eb
DJ
6270 /* In this complicated case we just disable the partial_nregs code. */
6271 if (rs6000_darwin64_abi && mode == BLKmode
6272 && TREE_CODE (type) == RECORD_TYPE
6273 && int_size_in_bytes (type) > 0)
6274 return 0;
6275
294bd182 6276 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6277
79773478
AM
6278 if (USE_FP_FOR_ARG_P (cum, mode, type))
6279 {
fb63c729
AM
6280 /* If we are passing this arg in the fixed parameter save area
6281 (gprs or memory) as well as fprs, then this function should
79773478
AM
6282 return the number of partial bytes passed in the parameter
6283 save area rather than partial bytes passed in fprs. */
6284 if (type
6285 && (cum->nargs_prototype <= 0
6286 || (DEFAULT_ABI == ABI_AIX
6287 && TARGET_XL_COMPAT
6288 && align_words >= GP_ARG_NUM_REG)))
6289 return 0;
6290 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6291 > FP_ARG_MAX_REG + 1)
ac7e839c 6292 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6293 else if (cum->nargs_prototype >= 0)
6294 return 0;
6295 }
6296
6297 if (align_words < GP_ARG_NUM_REG
6298 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6299 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6300
c53bdcf5 6301 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6302 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6303
c53bdcf5 6304 return ret;
6305}
6306\f
6307/* A C expression that indicates when an argument must be passed by
6308 reference. If nonzero for an argument, a copy of that argument is
6309 made in memory and a pointer to the argument is passed instead of
6310 the argument itself. The pointer is passed in whatever way is
6311 appropriate for passing a pointer to that type.
6312
6313 Under V.4, aggregates and long double are passed by reference.
6314
6315 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6316 reference unless the AltiVec vector extension ABI is in force.
6317
6318 As an extension to all ABIs, variable sized types are passed by
6319 reference. */
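/* Restating the rules above with concrete cases (illustrative only):
   under the SVR4/V.4 ABI a "struct { int a, b; }" argument or an
   IEEE-quad "long double" is passed by reference; a variably sized
   type (one whose size is not a compile-time constant) is passed by
   reference under every ABI; and a 16-byte GCC vector argument is
   passed by reference on 32-bit targets unless the AltiVec ABI is in
   force.  */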
4697a36c 6320
8cd5a4e0 6321static bool
f676971a 6322rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6323 enum machine_mode mode, const_tree type,
bada2eb8 6324 bool named ATTRIBUTE_UNUSED)
4697a36c 6325{
602ea4d3 6326 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
6327 {
6328 if (TARGET_DEBUG_ARG)
6329 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6330 return 1;
6331 }
6332
6333 if (!type)
6334 return 0;
4697a36c 6335
6336 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6337 {
6338 if (TARGET_DEBUG_ARG)
6339 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6340 return 1;
6341 }
6342
6343 if (int_size_in_bytes (type) < 0)
6344 {
6345 if (TARGET_DEBUG_ARG)
6346 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6347 return 1;
6348 }
6349
6350 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6351 modes only exist for GCC vector types if -maltivec. */
6352 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6353 {
6354 if (TARGET_DEBUG_ARG)
6355 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
6356 return 1;
6357 }
6358
6359 /* Pass synthetic vectors in memory. */
bada2eb8 6360 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6361 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
6362 {
6363 static bool warned_for_pass_big_vectors = false;
6364 if (TARGET_DEBUG_ARG)
6365 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6366 if (!warned_for_pass_big_vectors)
6367 {
d4ee4d25 6368 warning (0, "GCC vector passed by reference: "
6369 "non-standard ABI extension with no compatibility guarantee");
6370 warned_for_pass_big_vectors = true;
6371 }
6372 return 1;
6373 }
6374
b2d04ecf 6375 return 0;
4697a36c 6376}
6377
6378static void
2d9db8eb 6379rs6000_move_block_from_reg (int regno, rtx x, int nregs)
6380{
6381 int i;
6382 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6383
6384 if (nregs == 0)
6385 return;
6386
c4ad648e 6387 for (i = 0; i < nregs; i++)
5985c7a6 6388 {
9390387d 6389 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6390 if (reload_completed)
6391 {
6392 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6393 tem = NULL_RTX;
6394 else
6395 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6396 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6397 }
6398 else
6399 tem = replace_equiv_address (tem, XEXP (tem, 0));
6400
37409796 6401 gcc_assert (tem);
6402
6403 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6404 }
6405}
6406\f
6407/* Perform any needed actions for a function that is receiving a
f676971a 6408 variable number of arguments.
6409
6410 CUM is as above.
6411
6412 MODE and TYPE are the mode and type of the current parameter.
6413
6414 PRETEND_SIZE is a variable that should be set to the amount of stack
6415 that must be pushed by the prolog to pretend that our caller pushed
6416 it.
6417
6418 Normally, this macro will push all remaining incoming registers on the
6419 stack and set PRETEND_SIZE to the length of the registers pushed. */
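/* A rough picture of what this ends up doing for V.4 (an illustrative
   sketch, not part of the original comment): for something like
   "int f (const char *fmt, ...)" only r3 is named, so the remaining
   argument gprs r4-r10 (7 * 4 == 28 bytes, rounded up to a
   doubleword) and, with hard float enabled, f1-f8 (8 * 8 == 64 bytes)
   may be dumped into the register save area allocated below; the
   cfun->va_list_gpr_size / va_list_fpr_size counts let the area be
   trimmed to the registers va_arg will actually read.  */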
6420
c6e8c921 6421static void
f676971a 6422setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6423 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6424 int no_rtl)
4697a36c 6425{
6426 CUMULATIVE_ARGS next_cum;
6427 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6428 rtx save_area = NULL_RTX, mem;
6429 int first_reg_offset;
6430 alias_set_type set;
4697a36c 6431
f31bf321 6432 /* Skip the last named argument. */
d34c5b80 6433 next_cum = *cum;
594a51fe 6434 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6435
f607bc57 6436 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6437 {
6438 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6439
60e2d0ca 6440 if (! no_rtl)
6441 {
6442 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6443 HOST_WIDE_INT offset = 0;
6444
6445 /* Try to optimize the size of the varargs save area.
6446 The ABI requires that ap.reg_save_area is doubleword
6447 aligned, but we don't need to allocate space for all
 6448 			 the bytes, only those into which we will actually save
 6449 			 anything.  */
6450 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6451 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6452 if (TARGET_HARD_FLOAT && TARGET_FPRS
6453 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6454 && cfun->va_list_fpr_size)
6455 {
6456 if (gpr_reg_num)
6457 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6458 * UNITS_PER_FP_WORD;
6459 if (cfun->va_list_fpr_size
6460 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6461 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6462 else
6463 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6464 * UNITS_PER_FP_WORD;
6465 }
6466 if (gpr_reg_num)
6467 {
6468 offset = -((first_reg_offset * reg_size) & ~7);
6469 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6470 {
6471 gpr_reg_num = cfun->va_list_gpr_size;
6472 if (reg_size == 4 && (first_reg_offset & 1))
6473 gpr_reg_num++;
6474 }
6475 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6476 }
6477 else if (fpr_size)
6478 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6479 * UNITS_PER_FP_WORD
6480 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6481
6482 if (gpr_size + fpr_size)
6483 {
6484 rtx reg_save_area
6485 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6486 gcc_assert (GET_CODE (reg_save_area) == MEM);
6487 reg_save_area = XEXP (reg_save_area, 0);
6488 if (GET_CODE (reg_save_area) == PLUS)
6489 {
6490 gcc_assert (XEXP (reg_save_area, 0)
6491 == virtual_stack_vars_rtx);
6492 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6493 offset += INTVAL (XEXP (reg_save_area, 1));
6494 }
6495 else
6496 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6497 }
6498
6499 cfun->machine->varargs_save_offset = offset;
6500 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6501 }
4697a36c 6502 }
60e2d0ca 6503 else
4697a36c 6504 {
d34c5b80 6505 first_reg_offset = next_cum.words;
4cc833b7 6506 save_area = virtual_incoming_args_rtx;
4697a36c 6507
fe984136 6508 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6509 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6510 }
4697a36c 6511
dfafc897 6512 set = get_varargs_alias_set ();
6513 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6514 && cfun->va_list_gpr_size)
4cc833b7 6515 {
6516 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6517
6518 if (va_list_gpr_counter_field)
6519 {
6520 /* V4 va_list_gpr_size counts number of registers needed. */
6521 if (nregs > cfun->va_list_gpr_size)
6522 nregs = cfun->va_list_gpr_size;
6523 }
6524 else
6525 {
6526 /* char * va_list instead counts number of bytes needed. */
6527 if (nregs > cfun->va_list_gpr_size / reg_size)
6528 nregs = cfun->va_list_gpr_size / reg_size;
6529 }
6530
dfafc897 6531 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6532 plus_constant (save_area,
6533 first_reg_offset * reg_size));
6534 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6535 set_mem_alias_set (mem, set);
8ac61af7 6536 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6537
f676971a 6538 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6539 nregs);
6540 }
6541
4697a36c 6542 /* Save FP registers if needed. */
f607bc57 6543 if (DEFAULT_ABI == ABI_V4
6544 && TARGET_HARD_FLOAT && TARGET_FPRS
6545 && ! no_rtl
6546 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6547 && cfun->va_list_fpr_size)
4697a36c 6548 {
9d30f3c1 6549 int fregno = next_cum.fregno, nregs;
9ebbca7d 6550 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6551 rtx lab = gen_label_rtx ();
6552 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6553 * UNITS_PER_FP_WORD);
4697a36c 6554
6555 emit_jump_insn
6556 (gen_rtx_SET (VOIDmode,
6557 pc_rtx,
6558 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6559 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6560 const0_rtx),
39403d82 6561 gen_rtx_LABEL_REF (VOIDmode, lab),
6562 pc_rtx)));
6563
6564 for (nregs = 0;
6565 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6566 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6567 {
5496b36f 6568 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 6569 MEM_NOTRAP_P (mem) = 1;
c4ad648e 6570 set_mem_alias_set (mem, set);
94ff898d 6571 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 6572 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 6573 }
6574
6575 emit_label (lab);
4697a36c 6576 }
4697a36c 6577}
4697a36c 6578
dfafc897 6579/* Create the va_list data type. */
2c4974b7 6580
6581static tree
6582rs6000_build_builtin_va_list (void)
dfafc897 6583{
64c2816f 6584 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6585
6586 /* For AIX, prefer 'char *' because that's what the system
6587 header files like. */
f607bc57 6588 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6589 return build_pointer_type (char_type_node);
dfafc897 6590
f1e639b1 6591 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6592 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6593
f676971a 6594 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6595 unsigned_char_type_node);
f676971a 6596 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6597 unsigned_char_type_node);
6598 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6599 every user file. */
6600 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6601 short_unsigned_type_node);
6602 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6603 ptr_type_node);
6604 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6605 ptr_type_node);
6606
6607 va_list_gpr_counter_field = f_gpr;
6608 va_list_fpr_counter_field = f_fpr;
6609
6610 DECL_FIELD_CONTEXT (f_gpr) = record;
6611 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6612 DECL_FIELD_CONTEXT (f_res) = record;
6613 DECL_FIELD_CONTEXT (f_ovf) = record;
6614 DECL_FIELD_CONTEXT (f_sav) = record;
6615
6616 TREE_CHAIN (record) = type_decl;
6617 TYPE_NAME (record) = type_decl;
6618 TYPE_FIELDS (record) = f_gpr;
6619 TREE_CHAIN (f_gpr) = f_fpr;
6620 TREE_CHAIN (f_fpr) = f_res;
6621 TREE_CHAIN (f_res) = f_ovf;
6622 TREE_CHAIN (f_ovf) = f_sav;
6623
6624 layout_type (record);
6625
6626 /* The correct type is an array type of one element. */
6627 return build_array_type (record, build_index_type (size_zero_node));
6628}
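
/* For reference, the V.4 record built above corresponds roughly to
   the following C-level view (an illustrative sketch only, never
   compiled as such):

     typedef struct __va_list_tag {
       unsigned char gpr;            gp argument registers used so far
       unsigned char fpr;            fp argument registers used so far
       unsigned short reserved;      the named padding field
       void *overflow_arg_area;      next stack-passed argument
       void *reg_save_area;          block filled in by the prologue
     } __gnuc_va_list[1];  */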
6629
6630/* Implement va_start. */
6631
d7bd8aeb 6632static void
a2369ed3 6633rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6634{
dfafc897 6635 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6636 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6637 tree gpr, fpr, ovf, sav, t;
2c4974b7 6638
dfafc897 6639 /* Only SVR4 needs something special. */
f607bc57 6640 if (DEFAULT_ABI != ABI_V4)
dfafc897 6641 {
e5faf155 6642 std_expand_builtin_va_start (valist, nextarg);
6643 return;
6644 }
6645
973a648b 6646 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6647 f_fpr = TREE_CHAIN (f_gpr);
6648 f_res = TREE_CHAIN (f_fpr);
6649 f_ovf = TREE_CHAIN (f_res);
6650 f_sav = TREE_CHAIN (f_ovf);
6651
872a65b5 6652 valist = build_va_arg_indirect_ref (valist);
6653 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6654 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6655 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6656 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
6657
6658 /* Count number of gp and fp argument registers used. */
6659 words = crtl->args.info.words;
6660 n_gpr = MIN (crtl->args.info.sysv_gregno - GP_ARG_MIN_REG,
987732e0 6661 GP_ARG_NUM_REG);
38173d38 6662 n_fpr = MIN (crtl->args.info.fregno - FP_ARG_MIN_REG,
987732e0 6663 FP_ARG_NUM_REG);
6664
6665 if (TARGET_DEBUG_ARG)
6666 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6667 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6668 words, n_gpr, n_fpr);
dfafc897 6669
6670 if (cfun->va_list_gpr_size)
6671 {
07beea0d 6672 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
47a25a46 6673 build_int_cst (NULL_TREE, n_gpr));
6674 TREE_SIDE_EFFECTS (t) = 1;
6675 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6676 }
58c8adc1 6677
6678 if (cfun->va_list_fpr_size)
6679 {
07beea0d 6680 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
47a25a46 6681 build_int_cst (NULL_TREE, n_fpr));
6682 TREE_SIDE_EFFECTS (t) = 1;
6683 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6684 }
6685
6686 /* Find the overflow area. */
6687 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6688 if (words != 0)
6689 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6690 size_int (words * UNITS_PER_WORD));
07beea0d 6691 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
6692 TREE_SIDE_EFFECTS (t) = 1;
6693 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6694
6695 /* If there were no va_arg invocations, don't set up the register
6696 save area. */
6697 if (!cfun->va_list_gpr_size
6698 && !cfun->va_list_fpr_size
6699 && n_gpr < GP_ARG_NUM_REG
6700 && n_fpr < FP_ARG_V4_MAX_REG)
6701 return;
6702
6703 /* Find the register save area. */
6704 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6705 if (cfun->machine->varargs_save_offset)
6706 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6707 size_int (cfun->machine->varargs_save_offset));
07beea0d 6708 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
6709 TREE_SIDE_EFFECTS (t) = 1;
6710 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6711}
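
/* For a V.4 function such as "int f (int a, ...)" (an illustrative
   case): one gpr and no fprs are named, so the code above stores
   gpr = 1 and fpr = 0, points overflow_arg_area at the incoming
   argument area on the stack, and points reg_save_area at the block
   set up by setup_incoming_varargs (adjusted by
   varargs_save_offset).  */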
6712
6713/* Implement va_arg. */
6714
6715tree
6716rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 6717{
6718 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6719 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6720 int size, rsize, n_reg, sav_ofs, sav_scale;
6721 tree lab_false, lab_over, addr;
6722 int align;
6723 tree ptrtype = build_pointer_type (type);
7393f7f8 6724 int regalign = 0;
cd3ce9b4 6725
6726 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6727 {
6728 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6729 return build_va_arg_indirect_ref (t);
6730 }
6731
6732 if (DEFAULT_ABI != ABI_V4)
6733 {
08b0dc1b 6734 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
6735 {
6736 tree elem_type = TREE_TYPE (type);
6737 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6738 int elem_size = GET_MODE_SIZE (elem_mode);
6739
6740 if (elem_size < UNITS_PER_WORD)
6741 {
23a60a04 6742 tree real_part, imag_part;
6743 tree post = NULL_TREE;
6744
6745 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6746 &post);
6747 /* Copy the value into a temporary, lest the formal temporary
6748 be reused out from under us. */
6749 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
6750 append_to_statement_list (post, pre_p);
6751
6752 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6753 post_p);
cd3ce9b4 6754
47a25a46 6755 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
6756 }
6757 }
6758
23a60a04 6759 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
6760 }
6761
6762 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6763 f_fpr = TREE_CHAIN (f_gpr);
6764 f_res = TREE_CHAIN (f_fpr);
6765 f_ovf = TREE_CHAIN (f_res);
6766 f_sav = TREE_CHAIN (f_ovf);
6767
872a65b5 6768 valist = build_va_arg_indirect_ref (valist);
6769 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6770 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6771 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6772 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
6773
6774 size = int_size_in_bytes (type);
6775 rsize = (size + 3) / 4;
6776 align = 1;
6777
08b0dc1b 6778 if (TARGET_HARD_FLOAT && TARGET_FPRS
6779 && (TYPE_MODE (type) == SFmode
6780 || TYPE_MODE (type) == DFmode
7393f7f8 6781 || TYPE_MODE (type) == TFmode
e41b2a33 6782 || TYPE_MODE (type) == SDmode
6783 || TYPE_MODE (type) == DDmode
6784 || TYPE_MODE (type) == TDmode))
6785 {
6786 /* FP args go in FP registers, if present. */
cd3ce9b4 6787 reg = fpr;
602ea4d3 6788 n_reg = (size + 7) / 8;
6789 sav_ofs = 8*4;
6790 sav_scale = 8;
e41b2a33 6791 if (TYPE_MODE (type) != SFmode && TYPE_MODE (type) != SDmode)
6792 align = 8;
6793 }
6794 else
6795 {
6796 /* Otherwise into GP registers. */
6797 reg = gpr;
6798 n_reg = rsize;
6799 sav_ofs = 0;
6800 sav_scale = 4;
6801 if (n_reg == 2)
6802 align = 8;
6803 }
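
  /* Worked example of the values chosen above (illustrative only):
     for a "double", size == 8, so reg == fpr, n_reg == 1,
     sav_ofs == 8*4 and sav_scale == 8, and the in-register copy is
     read from reg_save_area + 32 + fpr*8, just past the eight 4-byte
     gpr slots.  For a "long long", reg == gpr, n_reg == 2 and
     sav_scale == 4, and the code below first rounds gpr up to an
     even register number.  */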
6804
6805 /* Pull the value out of the saved registers.... */
6806
6807 lab_over = NULL;
6808 addr = create_tmp_var (ptr_type_node, "addr");
6809 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
6810
6811 /* AltiVec vectors never go in registers when -mabi=altivec. */
6812 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
6813 align = 16;
6814 else
6815 {
6816 lab_false = create_artificial_label ();
6817 lab_over = create_artificial_label ();
6818
6819 /* Long long and SPE vectors are aligned in the registers.
6820 As are any other 2 gpr item such as complex int due to a
6821 historical mistake. */
6822 u = reg;
602ea4d3 6823 if (n_reg == 2 && reg == gpr)
cd3ce9b4 6824 {
7393f7f8 6825 regalign = 1;
cd3ce9b4 6826 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
8fb632eb 6827 build_int_cst (TREE_TYPE (reg), n_reg - 1));
6828 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
6829 }
6830 /* _Decimal128 is passed in even/odd fpr pairs; the stored
6831 reg number is 0 for f1, so we want to make it odd. */
6832 else if (reg == fpr && TYPE_MODE (type) == TDmode)
6833 {
6834 regalign = 1;
6835 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), reg,
6836 build_int_cst (TREE_TYPE (reg), 1));
6837 u = build2 (MODIFY_EXPR, void_type_node, reg, t);
6838 }
cd3ce9b4 6839
95674810 6840 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
6841 t = build2 (GE_EXPR, boolean_type_node, u, t);
6842 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6843 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6844 gimplify_and_add (t, pre_p);
6845
6846 t = sav;
6847 if (sav_ofs)
5be014d5 6848 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 6849
6850 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
6851 build_int_cst (TREE_TYPE (reg), n_reg));
6852 u = fold_convert (sizetype, u);
6853 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
6854 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 6855
6856 /* _Decimal32 varargs are located in the second word of the 64-bit
6857 FP register for 32-bit binaries. */
6858 if (!TARGET_POWERPC64 && TYPE_MODE (type) == SDmode)
6859 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
6860
07beea0d 6861 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
6862 gimplify_and_add (t, pre_p);
6863
6864 t = build1 (GOTO_EXPR, void_type_node, lab_over);
6865 gimplify_and_add (t, pre_p);
6866
6867 t = build1 (LABEL_EXPR, void_type_node, lab_false);
6868 append_to_statement_list (t, pre_p);
6869
7393f7f8 6870 if ((n_reg == 2 && !regalign) || n_reg > 2)
6871 {
6872 /* Ensure that we don't find any more args in regs.
7393f7f8 6873 	     Alignment has already taken care of the special cases.  */
6874 t = build_gimple_modify_stmt (reg,
6875 build_int_cst (TREE_TYPE (reg), 8));
6876 gimplify_and_add (t, pre_p);
6877 }
6878 }
6879
6880 /* ... otherwise out of the overflow area. */
6881
6882 /* Care for on-stack alignment if needed. */
6883 t = ovf;
6884 if (align != 1)
6885 {
6886 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
6887 t = fold_convert (sizetype, t);
4a90aeeb 6888 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
6889 size_int (-align));
6890 t = fold_convert (TREE_TYPE (ovf), t);
6891 }
6892 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6893
07beea0d 6894 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
6895 gimplify_and_add (u, pre_p);
6896
5be014d5 6897 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
07beea0d 6898 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
6899 gimplify_and_add (t, pre_p);
6900
6901 if (lab_over)
6902 {
6903 t = build1 (LABEL_EXPR, void_type_node, lab_over);
6904 append_to_statement_list (t, pre_p);
6905 }
6906
6907 if (STRICT_ALIGNMENT
6908 && (TYPE_ALIGN (type)
6909 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
6910 {
6911 /* The value (of type complex double, for example) may not be
6912 aligned in memory in the saved registers, so copy via a
6913 temporary. (This is the same code as used for SPARC.) */
6914 tree tmp = create_tmp_var (type, "va_arg_tmp");
6915 tree dest_addr = build_fold_addr_expr (tmp);
6916
6917 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
6918 3, dest_addr, addr, size_int (rsize * 4));
6919
6920 gimplify_and_add (copy, pre_p);
6921 addr = dest_addr;
6922 }
6923
08b0dc1b 6924 addr = fold_convert (ptrtype, addr);
872a65b5 6925 return build_va_arg_indirect_ref (addr);
6926}
6927
6928/* Builtins. */
6929
6930static void
6931def_builtin (int mask, const char *name, tree type, int code)
6932{
96038623 6933 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
6934 {
6935 if (rs6000_builtin_decls[code])
6936 abort ();
6937
6938 rs6000_builtin_decls[code] =
6939 add_builtin_function (name, type, code, BUILT_IN_MD,
6940 NULL, NULL_TREE);
6941 }
6942}
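
/* Hypothetical usage sketch (the real calls live in the builtin
   initialization code later in this file): entries from tables such
   as bdesc_3arg below are registered roughly as

     def_builtin (d->mask, d->name, type_for_this_entry, d->code);

   so a builtin only becomes visible when its mask matches
   target_flags (or paired float is enabled).  "type_for_this_entry"
   is a placeholder for the function type resolved per entry.  */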
0ac081f6 6943
6944/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
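/* As an illustration (not part of the original table), the first
   entry below is what a source-level call such as

     vector float d = __builtin_altivec_vmaddfp (a, b, c);

   expands through, via CODE_FOR_altivec_vmaddfp.  */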
6945
2212663f 6946static const struct builtin_description bdesc_3arg[] =
6947{
6948 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
6949 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
6950 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
6951 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
6952 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
6953 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
6954 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
6955 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
6956 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
6957 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 6958 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
6959 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
6960 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
6961 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
6962 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
6963 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
6964 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
6965 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
6966 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
6967 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
6968 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
6969 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
6970 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
6971
6972 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
6973 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
6974 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
6975 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
6976 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
6977 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
6978 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
6979 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
6980 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
6981 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
6982 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
6983 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
6984 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
6985 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
6986 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
6987
6988 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
6989 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
6990 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
6991 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
6992 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
6993 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
6994 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
6995 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 6996 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 6997};
2212663f 6998
6999/* DST operations: void foo (void *, const int, const char). */
7000
7001static const struct builtin_description bdesc_dst[] =
7002{
7003 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
7004 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
7005 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
7006 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
7007
7008 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
7009 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
7010 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
7011 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
7012};
7013
2212663f 7014/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 7015
a3170dc6 7016static struct builtin_description bdesc_2arg[] =
0ac081f6 7017{
7018 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
7019 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
7020 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
7021 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
7022 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
7023 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
7024 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
7025 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
7026 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
7027 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
7028 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 7029 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 7030 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
7031 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
7032 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
7033 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
7034 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
7035 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
7036 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
7037 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
7038 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
7039 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
7040 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
7041 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
7042 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
7043 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
7044 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
7045 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
7046 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
7047 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
7048 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
7049 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
7050 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
7051 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
7052 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
7053 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
7054 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
7055 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
7056 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
7057 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
7058 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
7059 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
7060 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
7061 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
7062 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
7063 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
7064 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
7065 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
7066 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
7067 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
7068 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
7069 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
7070 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
7071 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
7072 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
7073 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
7074 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
7075 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
7076 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
7077 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
7078 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
7079 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
7080 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
7081 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 7082 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 7083 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
7084 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
7085 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
7086 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 7087 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
7088 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
7089 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
7090 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
7091 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
7092 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
7093 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
7094 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
7095 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
7096 { MASK_ALTIVEC, CODE_FOR_ashlv16qi3, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
7097 { MASK_ALTIVEC, CODE_FOR_ashlv8hi3, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
7098 { MASK_ALTIVEC, CODE_FOR_ashlv4si3, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
7099 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
7100 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
7101 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
7102 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
7103 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
7104 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
7105 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
7106 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
7107 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
7108 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
7109 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
7110 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
7111 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
7112 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
7113 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
7114 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
7115 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
7116 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
7117 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
7118 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
7119 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
7120 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
7121 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
7122 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
7123 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
7124 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
7125 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
7126 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
7127 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 7128 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 7129
7130 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
7131 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
7132 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
7133 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
7134 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
7135 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
7136 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
7137 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
7138 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
7139 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
7140 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
7141 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
7142 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
7143 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
7144 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
7145 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
7146 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
7147 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
7148 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
7149 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
7150 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
7151 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
7152 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
7153 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
7154 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7155 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7156 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7157 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7158 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7159 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7160 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7161 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7162 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7163 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7164 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7165 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7166 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7167 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7168 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7169 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7170 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7171 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7172 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7173 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7174 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7175 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7176 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7177 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7178 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7179 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7180 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7181 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7182 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7183 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7184 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7185 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7186 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7187 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7188 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7189 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7190 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7191 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7192 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7193 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7194 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7195 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7196 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7197 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7198 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7199 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7200 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7201 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7202 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7203 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7204 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7205 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7206 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7207 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7208 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7209 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7210 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7211 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7212 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7213 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7214 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7215 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7216 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7217 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7218 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7219 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7220 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7221 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7222 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7223 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7224 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7225 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7226 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7227 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7228 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7229 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7230 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7231 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7232 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7233 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7234 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7235 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7236 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7237 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7238 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7239 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7240 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7241 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7242 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7243 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7244 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7245 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7246 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7247 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7248 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7249 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7250 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7251 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7252 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7253 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7254 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7255 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7256 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7257
7258 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7259 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7260 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7261 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7262 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7263 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7264 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7265 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7266 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7267 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7268
 7269 /* Placeholder; leave as the first SPE builtin.  */
7270 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7271 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7272 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7273 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7274 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7275 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7276 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7277 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7278 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7279 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7280 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7281 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7282 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7283 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7284 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7285 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7286 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7287 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7288 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7289 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7290 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7291 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7292 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7293 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7294 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7295 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7296 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7297 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7298 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7299 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7300 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7301 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7302 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7303 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7304 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7305 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7306 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7307 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7308 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7309 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7310 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7311 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7312 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7313 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7314 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7315 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7316 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7317 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7318 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7319 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7320 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7321 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7322 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7323 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7324 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7325 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7326 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7327 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7328 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7329 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7330 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7331 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7332 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7333 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7334 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7335 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7336 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7337 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7338 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7339 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7340 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7341 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7342 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7343 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
7344 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7345 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
7346 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7347 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7348 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7349 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7350 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7351 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7352 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7353 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7354 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7355 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7356 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7357 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7358 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7359 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7360 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7361 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7362 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7363 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7364 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7365 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7366 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7367 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7368 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7369 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7370 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7371 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7372 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7373 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7374 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7375 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7376 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7377 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7378 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7379
7380 /* SPE binary operations expecting a 5-bit unsigned literal. */
7381 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7382
7383 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7384 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7385 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7386 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7387 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7388 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7389 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7390 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7391 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7392 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7393 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7394 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7395 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7396 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7397 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7398 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7399 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7400 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7401 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7402 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7403 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7404 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7405 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7406 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7407 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7408 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7409
7410 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7411 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
7412};
7413
7414/* AltiVec predicates. */
7415
7416struct builtin_description_predicates
7417{
7418 const unsigned int mask;
7419 const enum insn_code icode;
7420 const char *opcode;
7421 const char *const name;
7422 const enum rs6000_builtins code;
7423};
7424
7425static const struct builtin_description_predicates bdesc_altivec_preds[] =
7426{
7427 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7428 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7429 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7430 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7431 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7432 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7433 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7434 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7435 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7436 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7437 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7438 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
7439 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7440
7441 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7442 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7443 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7444};
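The predicate table above, like the other bdesc_* arrays in this file, is consumed by the expanders further down through a linear scan for a matching builtin code. A minimal, self-contained sketch of that lookup pattern follows; the demo_* names are illustrative only and are not part of rs6000.c or the GCC API.

#include <stddef.h>
#include <stdio.h>

/* Stand-in for a bdesc_* entry: a builtin code plus its name.  */
struct demo_builtin_desc
{
  int code;
  const char *name;
};

static const struct demo_builtin_desc demo_preds[] =
{
  { 100, "__builtin_demo_vcmpbfp_p" },
  { 101, "__builtin_demo_vcmpeqfp_p" },
  { 102, "__builtin_demo_vcmpgtfp_p" }
};

/* Linear scan over the table, as the expanders do with bdesc_*.  */
static const struct demo_builtin_desc *
demo_lookup (int fcode)
{
  size_t i;
  for (i = 0; i < sizeof (demo_preds) / sizeof (demo_preds[0]); i++)
    if (demo_preds[i].code == fcode)
      return &demo_preds[i];
  return NULL;
}

int
main (void)
{
  const struct demo_builtin_desc *d = demo_lookup (101);
  printf ("%s\n", d ? d->name : "<no such builtin>");
  return 0;
}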
24408032 7445
a3170dc6
AH
7446/* SPE predicates. */
7447static struct builtin_description bdesc_spe_predicates[] =
7448{
7449 /* Place-holder. Leave as first. */
7450 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7451 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7452 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7453 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7454 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7455 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7456 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7457 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7458 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7459 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7460 /* Place-holder. Leave as last. */
7461 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7462};
7463
7464/* SPE evsel predicates. */
7465static struct builtin_description bdesc_spe_evsel[] =
7466{
7467 /* Place-holder. Leave as first. */
7468 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7469 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7470 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7471 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7472 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7473 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7474 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7475 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7476 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7477 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7478 /* Place-holder. Leave as last. */
7479 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7480};
7481
96038623
DE
7482/* PAIRED predicates. */
7483static const struct builtin_description bdesc_paired_preds[] =
7484{
7485 /* Place-holder. Leave as first. */
7486 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7487 /* Place-holder. Leave as last. */
7488 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7489};
7490
b6d08ca1 7491/* ABS* operations. */
100c4561
AH
7492
7493static const struct builtin_description bdesc_abs[] =
7494{
7495 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7496 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7497 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7498 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7499 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7500 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7501 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7502};
7503
617e0e1d
DB
7504/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7505 foo (VECa). */
24408032 7506
a3170dc6 7507static struct builtin_description bdesc_1arg[] =
2212663f 7508{
617e0e1d
DB
7509 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7510 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7511 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7512 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7513 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7514 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7515 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7516 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7517 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7518 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7519 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7520 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7521 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7522 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7523 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7524 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7525 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7526
58646b77
PB
7527 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7528 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7529 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7530 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7531 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7532 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7533 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7534 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7535 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7536 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7537 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7538 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7539 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7540 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7541 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7542 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7543 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7544 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7545 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7546
a3170dc6
AH
7547 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7548 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7549 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7550 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7551 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7552 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7553 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7554 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7555 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7556 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7557 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7558 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7559 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7560 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7561 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7562 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7563 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7564 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7565 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7566 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7567 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7568 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7569 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7570 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7571 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7572 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7573 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7574 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7575 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7576 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7577
7578 /* Place-holder. Leave as last unary SPE builtin. */
96038623
DE
7579 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7580
7581 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7582 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7583 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7584 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7585 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
2212663f
DB
7586};
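The comment in the table above notes that the SPE unary builtins must form a contiguous block from SPE_BUILTIN_EVABS through SPE_BUILTIN_EVSUBFUSIAAW; that ordering is what lets other code classify a builtin with a simple range test. A standalone sketch of the idea, using made-up enumerators rather than the real rs6000_builtins values:

#include <stdio.h>

/* Made-up builtin codes; only the ordering matters.  */
enum demo_builtin
{
  DEMO_ALTIVEC_ABS,       /* unrelated builtin before the SPE block */
  DEMO_SPE_EVABS,         /* first SPE unary builtin */
  DEMO_SPE_EVNEG,
  DEMO_SPE_EVSUBFUSIAAW,  /* last SPE unary builtin */
  DEMO_PAIRED_ABS         /* unrelated builtin after the SPE block */
};

/* Classification by range check, which only works because the block
   of SPE unary codes is contiguous.  */
static int
demo_is_spe_unary (enum demo_builtin code)
{
  return code >= DEMO_SPE_EVABS && code <= DEMO_SPE_EVSUBFUSIAAW;
}

int
main (void)
{
  printf ("%d\n", demo_is_spe_unary (DEMO_SPE_EVNEG));   /* prints 1 */
  printf ("%d\n", demo_is_spe_unary (DEMO_PAIRED_ABS));  /* prints 0 */
  return 0;
}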
7587
7588static rtx
5039610b 7589rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7590{
7591 rtx pat;
5039610b 7592 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7593 rtx op0 = expand_normal (arg0);
2212663f
DB
7594 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7595 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7596
0559cc77
DE
7597 if (icode == CODE_FOR_nothing)
7598 /* Builtin not supported on this processor. */
7599 return 0;
7600
20e26713
AH
7601 /* If we got invalid arguments, bail out before generating bad rtl. */
7602 if (arg0 == error_mark_node)
9a171fcd 7603 return const0_rtx;
20e26713 7604
0559cc77
DE
7605 if (icode == CODE_FOR_altivec_vspltisb
7606 || icode == CODE_FOR_altivec_vspltish
7607 || icode == CODE_FOR_altivec_vspltisw
7608 || icode == CODE_FOR_spe_evsplatfi
7609 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7610 {
7611 /* Only allow 5-bit *signed* literals. */
b44140e7 7612 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7613 || INTVAL (op0) > 15
7614 || INTVAL (op0) < -16)
b44140e7
AH
7615 {
7616 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7617 return const0_rtx;
b44140e7 7618 }
b44140e7
AH
7619 }
7620
c62f2db5 7621 if (target == 0
2212663f
DB
7622 || GET_MODE (target) != tmode
7623 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7624 target = gen_reg_rtx (tmode);
7625
7626 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7627 op0 = copy_to_mode_reg (mode0, op0);
7628
7629 pat = GEN_FCN (icode) (target, op0);
7630 if (! pat)
7631 return 0;
7632 emit_insn (pat);
0ac081f6 7633
2212663f
DB
7634 return target;
7635}
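The check above rejects anything outside the range of a 5-bit signed immediate, i.e. -16..15, before any RTL is generated. A self-contained sketch of just that range test; fits_5bit_signed is an illustrative name, not a GCC function:

#include <stddef.h>
#include <stdio.h>

/* Same bounds as the INTVAL (op0) > 15 || INTVAL (op0) < -16 test
   used for vspltisb/vspltish/vspltisw and evsplatfi/evsplati.  */
static int
fits_5bit_signed (long value)
{
  return value >= -16 && value <= 15;
}

int
main (void)
{
  long tests[] = { -17, -16, 0, 15, 16 };
  size_t i;

  for (i = 0; i < sizeof (tests) / sizeof (tests[0]); i++)
    printf ("%ld -> %s\n", tests[i],
            fits_5bit_signed (tests[i])
            ? "ok" : "argument 1 must be a 5-bit signed literal");
  return 0;
}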
ae4b4a02 7636
100c4561 7637static rtx
5039610b 7638altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7639{
7640 rtx pat, scratch1, scratch2;
5039610b 7641 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7642 rtx op0 = expand_normal (arg0);
100c4561
AH
7643 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7644 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7645
7646 /* If we have invalid arguments, bail out before generating bad rtl. */
7647 if (arg0 == error_mark_node)
9a171fcd 7648 return const0_rtx;
100c4561
AH
7649
7650 if (target == 0
7651 || GET_MODE (target) != tmode
7652 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7653 target = gen_reg_rtx (tmode);
7654
7655 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7656 op0 = copy_to_mode_reg (mode0, op0);
7657
7658 scratch1 = gen_reg_rtx (mode0);
7659 scratch2 = gen_reg_rtx (mode0);
7660
7661 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7662 if (! pat)
7663 return 0;
7664 emit_insn (pat);
7665
7666 return target;
7667}
7668
0ac081f6 7669static rtx
5039610b 7670rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7671{
7672 rtx pat;
5039610b
SL
7673 tree arg0 = CALL_EXPR_ARG (exp, 0);
7674 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7675 rtx op0 = expand_normal (arg0);
7676 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7677 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7678 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7679 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7680
0559cc77
DE
7681 if (icode == CODE_FOR_nothing)
7682 /* Builtin not supported on this processor. */
7683 return 0;
7684
20e26713
AH
7685 /* If we got invalid arguments, bail out before generating bad rtl. */
7686 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7687 return const0_rtx;
20e26713 7688
0559cc77
DE
7689 if (icode == CODE_FOR_altivec_vcfux
7690 || icode == CODE_FOR_altivec_vcfsx
7691 || icode == CODE_FOR_altivec_vctsxs
7692 || icode == CODE_FOR_altivec_vctuxs
7693 || icode == CODE_FOR_altivec_vspltb
7694 || icode == CODE_FOR_altivec_vsplth
7695 || icode == CODE_FOR_altivec_vspltw
7696 || icode == CODE_FOR_spe_evaddiw
7697 || icode == CODE_FOR_spe_evldd
7698 || icode == CODE_FOR_spe_evldh
7699 || icode == CODE_FOR_spe_evldw
7700 || icode == CODE_FOR_spe_evlhhesplat
7701 || icode == CODE_FOR_spe_evlhhossplat
7702 || icode == CODE_FOR_spe_evlhhousplat
7703 || icode == CODE_FOR_spe_evlwhe
7704 || icode == CODE_FOR_spe_evlwhos
7705 || icode == CODE_FOR_spe_evlwhou
7706 || icode == CODE_FOR_spe_evlwhsplat
7707 || icode == CODE_FOR_spe_evlwwsplat
7708 || icode == CODE_FOR_spe_evrlwi
7709 || icode == CODE_FOR_spe_evslwi
7710 || icode == CODE_FOR_spe_evsrwis
f5119d10 7711 || icode == CODE_FOR_spe_evsubifw
0559cc77 7712 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7713 {
7714 /* Only allow 5-bit unsigned literals. */
8bb418a3 7715 STRIP_NOPS (arg1);
b44140e7
AH
7716 if (TREE_CODE (arg1) != INTEGER_CST
7717 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7718 {
7719 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7720 return const0_rtx;
b44140e7 7721 }
b44140e7
AH
7722 }
7723
c62f2db5 7724 if (target == 0
0ac081f6
AH
7725 || GET_MODE (target) != tmode
7726 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7727 target = gen_reg_rtx (tmode);
7728
7729 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7730 op0 = copy_to_mode_reg (mode0, op0);
7731 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7732 op1 = copy_to_mode_reg (mode1, op1);
7733
7734 pat = GEN_FCN (icode) (target, op0, op1);
7735 if (! pat)
7736 return 0;
7737 emit_insn (pat);
7738
7739 return target;
7740}
6525c0e7 7741
ae4b4a02 7742static rtx
f676971a 7743altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7744 tree exp, rtx target)
ae4b4a02
AH
7745{
7746 rtx pat, scratch;
5039610b
SL
7747 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7748 tree arg0 = CALL_EXPR_ARG (exp, 1);
7749 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7750 rtx op0 = expand_normal (arg0);
7751 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
7752 enum machine_mode tmode = SImode;
7753 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7754 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7755 int cr6_form_int;
7756
7757 if (TREE_CODE (cr6_form) != INTEGER_CST)
7758 {
7759 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 7760 return const0_rtx;
ae4b4a02
AH
7761 }
7762 else
7763 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
7764
37409796 7765 gcc_assert (mode0 == mode1);
ae4b4a02
AH
7766
7767 /* If we have invalid arguments, bail out before generating bad rtl. */
7768 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7769 return const0_rtx;
ae4b4a02
AH
7770
7771 if (target == 0
7772 || GET_MODE (target) != tmode
7773 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7774 target = gen_reg_rtx (tmode);
7775
7776 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7777 op0 = copy_to_mode_reg (mode0, op0);
7778 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7779 op1 = copy_to_mode_reg (mode1, op1);
7780
7781 scratch = gen_reg_rtx (mode0);
7782
7783 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 7784 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
7785 if (! pat)
7786 return 0;
7787 emit_insn (pat);
7788
7789 /* The vec_any* and vec_all* predicates use the same opcodes for two
7790 different operations, but the bits in CR6 will be different
7791 depending on what information we want. So we have to play tricks
7792 with CR6 to get the right bits out.
7793
7794 If you think this is disgusting, look at the specs for the
7795 AltiVec predicates. */
7796
c4ad648e
AM
7797 switch (cr6_form_int)
7798 {
7799 case 0:
7800 emit_insn (gen_cr6_test_for_zero (target));
7801 break;
7802 case 1:
7803 emit_insn (gen_cr6_test_for_zero_reverse (target));
7804 break;
7805 case 2:
7806 emit_insn (gen_cr6_test_for_lt (target));
7807 break;
7808 case 3:
7809 emit_insn (gen_cr6_test_for_lt_reverse (target));
7810 break;
7811 default:
7812 error ("argument 1 of __builtin_altivec_predicate is out of range");
7813 break;
7814 }
ae4b4a02
AH
7815
7816 return target;
7817}
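For reference, the cr6_form argument handled above selects which CR6 test is emitted after the compare. A small standalone sketch of that 0..3 mapping; the strings are descriptive, not insn names:

#include <stdio.h>

/* Mirrors the switch on cr6_form_int above: 0..3 pick one of four CR6
   tests, anything else is rejected.  */
static const char *
demo_cr6_test (int cr6_form)
{
  switch (cr6_form)
    {
    case 0: return "test for zero";
    case 1: return "test for zero, reversed";
    case 2: return "test for lt";
    case 3: return "test for lt, reversed";
    default: return "argument 1 is out of range";
    }
}

int
main (void)
{
  int i;
  for (i = 0; i <= 4; i++)
    printf ("%d -> %s\n", i, demo_cr6_test (i));
  return 0;
}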
7818
96038623
DE
7819static rtx
7820paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
7821{
7822 rtx pat, addr;
7823 tree arg0 = CALL_EXPR_ARG (exp, 0);
7824 tree arg1 = CALL_EXPR_ARG (exp, 1);
7825 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7826 enum machine_mode mode0 = Pmode;
7827 enum machine_mode mode1 = Pmode;
7828 rtx op0 = expand_normal (arg0);
7829 rtx op1 = expand_normal (arg1);
7830
7831 if (icode == CODE_FOR_nothing)
7832 /* Builtin not supported on this processor. */
7833 return 0;
7834
7835 /* If we got invalid arguments, bail out before generating bad rtl. */
7836 if (arg0 == error_mark_node || arg1 == error_mark_node)
7837 return const0_rtx;
7838
7839 if (target == 0
7840 || GET_MODE (target) != tmode
7841 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7842 target = gen_reg_rtx (tmode);
7843
7844 op1 = copy_to_mode_reg (mode1, op1);
7845
7846 if (op0 == const0_rtx)
7847 {
7848 addr = gen_rtx_MEM (tmode, op1);
7849 }
7850 else
7851 {
7852 op0 = copy_to_mode_reg (mode0, op0);
7853 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7854 }
7855
7856 pat = GEN_FCN (icode) (target, addr);
7857
7858 if (! pat)
7859 return 0;
7860 emit_insn (pat);
7861
7862 return target;
7863}
7864
b4a62fa0 7865static rtx
5039610b 7866altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
b4a62fa0
SB
7867{
7868 rtx pat, addr;
5039610b
SL
7869 tree arg0 = CALL_EXPR_ARG (exp, 0);
7870 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
7871 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7872 enum machine_mode mode0 = Pmode;
7873 enum machine_mode mode1 = Pmode;
84217346
MD
7874 rtx op0 = expand_normal (arg0);
7875 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
7876
7877 if (icode == CODE_FOR_nothing)
7878 /* Builtin not supported on this processor. */
7879 return 0;
7880
7881 /* If we got invalid arguments, bail out before generating bad rtl. */
7882 if (arg0 == error_mark_node || arg1 == error_mark_node)
7883 return const0_rtx;
7884
7885 if (target == 0
7886 || GET_MODE (target) != tmode
7887 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7888 target = gen_reg_rtx (tmode);
7889
f676971a 7890 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
7891
7892 if (op0 == const0_rtx)
7893 {
7894 addr = gen_rtx_MEM (tmode, op1);
7895 }
7896 else
7897 {
7898 op0 = copy_to_mode_reg (mode0, op0);
7899 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7900 }
7901
7902 pat = GEN_FCN (icode) (target, addr);
7903
7904 if (! pat)
7905 return 0;
7906 emit_insn (pat);
7907
7908 return target;
7909}
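The load expanders above build the memory address in one of two forms: when the first operand is the constant 0 the address is just the second operand, otherwise it is the register+register sum. A standalone arithmetic sketch of that choice, with plain integers standing in for the RTL; demo_effective_address is an invented name:

#include <stdio.h>

/* Base of 0 means "no base register": use the index alone, as in
   MEM (op1); otherwise form the reg+reg address, as in
   MEM (PLUS (op0, op1)).  */
static unsigned long
demo_effective_address (unsigned long base, unsigned long index)
{
  if (base == 0)
    return index;
  return base + index;
}

int
main (void)
{
  printf ("0x%lx\n", demo_effective_address (0, 0x1000));
  printf ("0x%lx\n", demo_effective_address (0x20, 0x1000));
  return 0;
}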
7910
61bea3b0 7911static rtx
5039610b 7912spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 7913{
5039610b
SL
7914 tree arg0 = CALL_EXPR_ARG (exp, 0);
7915 tree arg1 = CALL_EXPR_ARG (exp, 1);
7916 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7917 rtx op0 = expand_normal (arg0);
7918 rtx op1 = expand_normal (arg1);
7919 rtx op2 = expand_normal (arg2);
61bea3b0
AH
7920 rtx pat;
7921 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
7922 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
7923 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
7924
7925 /* Invalid arguments. Bail before doing anything stupid! */
7926 if (arg0 == error_mark_node
7927 || arg1 == error_mark_node
7928 || arg2 == error_mark_node)
7929 return const0_rtx;
7930
7931 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
7932 op0 = copy_to_mode_reg (mode2, op0);
7933 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7934 op1 = copy_to_mode_reg (mode0, op1);
7935 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7936 op2 = copy_to_mode_reg (mode1, op2);
7937
7938 pat = GEN_FCN (icode) (op1, op2, op0);
7939 if (pat)
7940 emit_insn (pat);
7941 return NULL_RTX;
7942}
7943
96038623
DE
7944static rtx
7945paired_expand_stv_builtin (enum insn_code icode, tree exp)
7946{
7947 tree arg0 = CALL_EXPR_ARG (exp, 0);
7948 tree arg1 = CALL_EXPR_ARG (exp, 1);
7949 tree arg2 = CALL_EXPR_ARG (exp, 2);
7950 rtx op0 = expand_normal (arg0);
7951 rtx op1 = expand_normal (arg1);
7952 rtx op2 = expand_normal (arg2);
7953 rtx pat, addr;
7954 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7955 enum machine_mode mode1 = Pmode;
7956 enum machine_mode mode2 = Pmode;
7957
7958 /* Invalid arguments. Bail before doing anything stupid! */
7959 if (arg0 == error_mark_node
7960 || arg1 == error_mark_node
7961 || arg2 == error_mark_node)
7962 return const0_rtx;
7963
7964 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7965 op0 = copy_to_mode_reg (tmode, op0);
7966
7967 op2 = copy_to_mode_reg (mode2, op2);
7968
7969 if (op1 == const0_rtx)
7970 {
7971 addr = gen_rtx_MEM (tmode, op2);
7972 }
7973 else
7974 {
7975 op1 = copy_to_mode_reg (mode1, op1);
7976 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7977 }
7978
7979 pat = GEN_FCN (icode) (addr, op0);
7980 if (pat)
7981 emit_insn (pat);
7982 return NULL_RTX;
7983}
7984
6525c0e7 7985static rtx
5039610b 7986altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 7987{
5039610b
SL
7988 tree arg0 = CALL_EXPR_ARG (exp, 0);
7989 tree arg1 = CALL_EXPR_ARG (exp, 1);
7990 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7991 rtx op0 = expand_normal (arg0);
7992 rtx op1 = expand_normal (arg1);
7993 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
7994 rtx pat, addr;
7995 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7996 enum machine_mode mode1 = Pmode;
7997 enum machine_mode mode2 = Pmode;
6525c0e7
AH
7998
7999 /* Invalid arguments. Bail before doing anything stupid! */
8000 if (arg0 == error_mark_node
8001 || arg1 == error_mark_node
8002 || arg2 == error_mark_node)
9a171fcd 8003 return const0_rtx;
6525c0e7 8004
b4a62fa0
SB
8005 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8006 op0 = copy_to_mode_reg (tmode, op0);
8007
f676971a 8008 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
8009
8010 if (op1 == const0_rtx)
8011 {
8012 addr = gen_rtx_MEM (tmode, op2);
8013 }
8014 else
8015 {
8016 op1 = copy_to_mode_reg (mode1, op1);
8017 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8018 }
6525c0e7 8019
b4a62fa0 8020 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
8021 if (pat)
8022 emit_insn (pat);
8023 return NULL_RTX;
8024}
8025
2212663f 8026static rtx
5039610b 8027rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
8028{
8029 rtx pat;
5039610b
SL
8030 tree arg0 = CALL_EXPR_ARG (exp, 0);
8031 tree arg1 = CALL_EXPR_ARG (exp, 1);
8032 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8033 rtx op0 = expand_normal (arg0);
8034 rtx op1 = expand_normal (arg1);
8035 rtx op2 = expand_normal (arg2);
2212663f
DB
8036 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8037 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8038 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8039 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 8040
774b5662
DE
8041 if (icode == CODE_FOR_nothing)
8042 /* Builtin not supported on this processor. */
8043 return 0;
8044
20e26713
AH
8045 /* If we got invalid arguments, bail out before generating bad rtl. */
8046 if (arg0 == error_mark_node
8047 || arg1 == error_mark_node
8048 || arg2 == error_mark_node)
9a171fcd 8049 return const0_rtx;
20e26713 8050
aba5fb01
NS
8051 if (icode == CODE_FOR_altivec_vsldoi_v4sf
8052 || icode == CODE_FOR_altivec_vsldoi_v4si
8053 || icode == CODE_FOR_altivec_vsldoi_v8hi
8054 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
8055 {
8056 /* Only allow 4-bit unsigned literals. */
8bb418a3 8057 STRIP_NOPS (arg2);
b44140e7
AH
8058 if (TREE_CODE (arg2) != INTEGER_CST
8059 || TREE_INT_CST_LOW (arg2) & ~0xf)
8060 {
8061 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 8062 return const0_rtx;
b44140e7 8063 }
b44140e7
AH
8064 }
8065
c62f2db5 8066 if (target == 0
2212663f
DB
8067 || GET_MODE (target) != tmode
8068 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8069 target = gen_reg_rtx (tmode);
8070
8071 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8072 op0 = copy_to_mode_reg (mode0, op0);
8073 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8074 op1 = copy_to_mode_reg (mode1, op1);
8075 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
8076 op2 = copy_to_mode_reg (mode2, op2);
8077
49e39588
RE
8078 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
8079 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
8080 else
8081 pat = GEN_FCN (icode) (target, op0, op1, op2);
2212663f
DB
8082 if (! pat)
8083 return 0;
8084 emit_insn (pat);
8085
8086 return target;
8087}
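The binop and ternop expanders above validate small unsigned immediates by masking off the allowed low bits: value & ~0x1f for 5-bit operands, & ~0xf for the vsldoi shift count, and & ~0x3 for the 2-bit dst/dss arguments further down. A standalone sketch of that idiom, parameterized by width; the helper name is illustrative, not a GCC function:

#include <stdio.h>

/* Nonzero iff VALUE fits in BITS unsigned bits: clearing the low BITS
   bits must leave nothing behind, exactly like the & ~0x1f tests.  */
static int
fits_unsigned_bits (unsigned long value, unsigned int bits)
{
  unsigned long low_mask = (1UL << bits) - 1;
  return (value & ~low_mask) == 0;
}

int
main (void)
{
  printf ("%d\n", fits_unsigned_bits (31, 5));  /* 1: ok for a 5-bit literal */
  printf ("%d\n", fits_unsigned_bits (32, 5));  /* 0: must be a 5-bit literal */
  printf ("%d\n", fits_unsigned_bits (15, 4));  /* 1: ok for vsldoi */
  printf ("%d\n", fits_unsigned_bits (4, 2));   /* 0: dss wants a 2-bit literal */
  return 0;
}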
92898235 8088
3a9b8c7e 8089/* Expand the lvx builtins. */
0ac081f6 8090static rtx
a2369ed3 8091altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 8092{
5039610b 8093 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 8094 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
8095 tree arg0;
8096 enum machine_mode tmode, mode0;
7c3abc73 8097 rtx pat, op0;
3a9b8c7e 8098 enum insn_code icode;
92898235 8099
0ac081f6
AH
8100 switch (fcode)
8101 {
f18c054f 8102 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 8103 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 8104 break;
f18c054f 8105 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 8106 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
8107 break;
8108 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 8109 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
8110 break;
8111 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 8112 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
8113 break;
8114 default:
8115 *expandedp = false;
8116 return NULL_RTX;
8117 }
0ac081f6 8118
3a9b8c7e 8119 *expandedp = true;
f18c054f 8120
5039610b 8121 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8122 op0 = expand_normal (arg0);
3a9b8c7e
AH
8123 tmode = insn_data[icode].operand[0].mode;
8124 mode0 = insn_data[icode].operand[1].mode;
f18c054f 8125
3a9b8c7e
AH
8126 if (target == 0
8127 || GET_MODE (target) != tmode
8128 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8129 target = gen_reg_rtx (tmode);
24408032 8130
3a9b8c7e
AH
8131 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8132 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 8133
3a9b8c7e
AH
8134 pat = GEN_FCN (icode) (target, op0);
8135 if (! pat)
8136 return 0;
8137 emit_insn (pat);
8138 return target;
8139}
f18c054f 8140
3a9b8c7e
AH
8141/* Expand the stvx builtins. */
8142static rtx
f676971a 8143altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8144 bool *expandedp)
3a9b8c7e 8145{
5039610b 8146 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8147 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8148 tree arg0, arg1;
8149 enum machine_mode mode0, mode1;
7c3abc73 8150 rtx pat, op0, op1;
3a9b8c7e 8151 enum insn_code icode;
f18c054f 8152
3a9b8c7e
AH
8153 switch (fcode)
8154 {
8155 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 8156 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
8157 break;
8158 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 8159 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
8160 break;
8161 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 8162 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
8163 break;
8164 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 8165 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
8166 break;
8167 default:
8168 *expandedp = false;
8169 return NULL_RTX;
8170 }
24408032 8171
5039610b
SL
8172 arg0 = CALL_EXPR_ARG (exp, 0);
8173 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
8174 op0 = expand_normal (arg0);
8175 op1 = expand_normal (arg1);
3a9b8c7e
AH
8176 mode0 = insn_data[icode].operand[0].mode;
8177 mode1 = insn_data[icode].operand[1].mode;
f18c054f 8178
3a9b8c7e
AH
8179 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8180 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8181 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8182 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 8183
3a9b8c7e
AH
8184 pat = GEN_FCN (icode) (op0, op1);
8185 if (pat)
8186 emit_insn (pat);
f18c054f 8187
3a9b8c7e
AH
8188 *expandedp = true;
8189 return NULL_RTX;
8190}
f18c054f 8191
3a9b8c7e
AH
8192/* Expand the dst builtins. */
8193static rtx
f676971a 8194altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8195 bool *expandedp)
3a9b8c7e 8196{
5039610b 8197 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8198 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8199 tree arg0, arg1, arg2;
8200 enum machine_mode mode0, mode1, mode2;
7c3abc73 8201 rtx pat, op0, op1, op2;
586de218 8202 const struct builtin_description *d;
a3170dc6 8203 size_t i;
f18c054f 8204
3a9b8c7e 8205 *expandedp = false;
f18c054f 8206
3a9b8c7e 8207 /* Handle DST variants. */
586de218 8208 d = bdesc_dst;
3a9b8c7e
AH
8209 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8210 if (d->code == fcode)
8211 {
5039610b
SL
8212 arg0 = CALL_EXPR_ARG (exp, 0);
8213 arg1 = CALL_EXPR_ARG (exp, 1);
8214 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8215 op0 = expand_normal (arg0);
8216 op1 = expand_normal (arg1);
8217 op2 = expand_normal (arg2);
3a9b8c7e
AH
8218 mode0 = insn_data[d->icode].operand[0].mode;
8219 mode1 = insn_data[d->icode].operand[1].mode;
8220 mode2 = insn_data[d->icode].operand[2].mode;
24408032 8221
3a9b8c7e
AH
8222 /* Invalid arguments; bail out before generating bad rtl. */
8223 if (arg0 == error_mark_node
8224 || arg1 == error_mark_node
8225 || arg2 == error_mark_node)
8226 return const0_rtx;
f18c054f 8227
86e7df90 8228 *expandedp = true;
8bb418a3 8229 STRIP_NOPS (arg2);
3a9b8c7e
AH
8230 if (TREE_CODE (arg2) != INTEGER_CST
8231 || TREE_INT_CST_LOW (arg2) & ~0x3)
8232 {
9e637a26 8233 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
8234 return const0_rtx;
8235 }
f18c054f 8236
3a9b8c7e 8237 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 8238 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
8239 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8240 op1 = copy_to_mode_reg (mode1, op1);
24408032 8241
3a9b8c7e
AH
8242 pat = GEN_FCN (d->icode) (op0, op1, op2);
8243 if (pat != 0)
8244 emit_insn (pat);
f18c054f 8245
3a9b8c7e
AH
8246 return NULL_RTX;
8247 }
f18c054f 8248
3a9b8c7e
AH
8249 return NULL_RTX;
8250}
24408032 8251
7a4eca66
DE
8252/* Expand vec_init builtin. */
8253static rtx
5039610b 8254altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
8255{
8256 enum machine_mode tmode = TYPE_MODE (type);
8257 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8258 int i, n_elt = GET_MODE_NUNITS (tmode);
8259 rtvec v = rtvec_alloc (n_elt);
8260
8261 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 8262 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8263
5039610b 8264 for (i = 0; i < n_elt; ++i)
7a4eca66 8265 {
5039610b 8266 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
8267 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8268 }
8269
7a4eca66
DE
8270 if (!target || !register_operand (target, tmode))
8271 target = gen_reg_rtx (tmode);
8272
8273 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8274 return target;
8275}
8276
8277/* Return the integer constant in ARG. Constrain it to be in the range
8278 of the subparts of VEC_TYPE; issue an error if not. */
8279
8280static int
8281get_element_number (tree vec_type, tree arg)
8282{
8283 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8284
8285 if (!host_integerp (arg, 1)
8286 || (elt = tree_low_cst (arg, 1), elt > max))
8287 {
8288 error ("selector must be an integer constant in the range 0..%wi", max);
8289 return 0;
8290 }
8291
8292 return elt;
8293}
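A self-contained counterpart of get_element_number above, minus the tree machinery: the selector must lie in 0..(subparts-1), and an out-of-range value reports an error and falls back to element 0. The demo_* name is illustrative only:

#include <stdio.h>

/* Validate a vec_set/vec_ext selector against the number of vector
   subparts, mirroring get_element_number.  */
static int
demo_get_element_number (unsigned int subparts, long arg)
{
  long max = (long) subparts - 1;

  if (arg < 0 || arg > max)
    {
      printf ("selector must be an integer constant in the range 0..%ld\n",
              max);
      return 0;
    }
  return (int) arg;
}

int
main (void)
{
  printf ("%d\n", demo_get_element_number (4, 3));  /* valid: prints 3 */
  printf ("%d\n", demo_get_element_number (4, 7));  /* error path, then 0 */
  return 0;
}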
8294
8295/* Expand vec_set builtin. */
8296static rtx
5039610b 8297altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
8298{
8299 enum machine_mode tmode, mode1;
8300 tree arg0, arg1, arg2;
8301 int elt;
8302 rtx op0, op1;
8303
5039610b
SL
8304 arg0 = CALL_EXPR_ARG (exp, 0);
8305 arg1 = CALL_EXPR_ARG (exp, 1);
8306 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
8307
8308 tmode = TYPE_MODE (TREE_TYPE (arg0));
8309 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8310 gcc_assert (VECTOR_MODE_P (tmode));
8311
8312 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
8313 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
8314 elt = get_element_number (TREE_TYPE (arg0), arg2);
8315
8316 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8317 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8318
8319 op0 = force_reg (tmode, op0);
8320 op1 = force_reg (mode1, op1);
8321
8322 rs6000_expand_vector_set (op0, op1, elt);
8323
8324 return op0;
8325}
8326
8327/* Expand vec_ext builtin. */
8328static rtx
5039610b 8329altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
8330{
8331 enum machine_mode tmode, mode0;
8332 tree arg0, arg1;
8333 int elt;
8334 rtx op0;
8335
5039610b
SL
8336 arg0 = CALL_EXPR_ARG (exp, 0);
8337 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8338
84217346 8339 op0 = expand_normal (arg0);
7a4eca66
DE
8340 elt = get_element_number (TREE_TYPE (arg0), arg1);
8341
8342 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8343 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8344 gcc_assert (VECTOR_MODE_P (mode0));
8345
8346 op0 = force_reg (mode0, op0);
8347
8348 if (optimize || !target || !register_operand (target, tmode))
8349 target = gen_reg_rtx (tmode);
8350
8351 rs6000_expand_vector_extract (target, op0, elt);
8352
8353 return target;
8354}
8355
3a9b8c7e
AH
8356/* Expand the builtin in EXP and store the result in TARGET. Store
8357 true in *EXPANDEDP if we found a builtin to expand. */
8358static rtx
a2369ed3 8359altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8360{
586de218
KG
8361 const struct builtin_description *d;
8362 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
8363 size_t i;
8364 enum insn_code icode;
5039610b 8365 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
8366 tree arg0;
8367 rtx op0, pat;
8368 enum machine_mode tmode, mode0;
3a9b8c7e 8369 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8370
58646b77
PB
8371 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8372 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8373 {
8374 *expandedp = true;
ea40ba9c 8375 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
8376 return const0_rtx;
8377 }
8378
3a9b8c7e
AH
8379 target = altivec_expand_ld_builtin (exp, target, expandedp);
8380 if (*expandedp)
8381 return target;
0ac081f6 8382
3a9b8c7e
AH
8383 target = altivec_expand_st_builtin (exp, target, expandedp);
8384 if (*expandedp)
8385 return target;
8386
8387 target = altivec_expand_dst_builtin (exp, target, expandedp);
8388 if (*expandedp)
8389 return target;
8390
8391 *expandedp = true;
95385cbb 8392
3a9b8c7e
AH
8393 switch (fcode)
8394 {
6525c0e7 8395 case ALTIVEC_BUILTIN_STVX:
5039610b 8396 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8397 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8398 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8399 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8400 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8401 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8402 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8403 case ALTIVEC_BUILTIN_STVXL:
5039610b 8404 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8405
95385cbb
AH
8406 case ALTIVEC_BUILTIN_MFVSCR:
8407 icode = CODE_FOR_altivec_mfvscr;
8408 tmode = insn_data[icode].operand[0].mode;
8409
8410 if (target == 0
8411 || GET_MODE (target) != tmode
8412 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8413 target = gen_reg_rtx (tmode);
f676971a 8414
95385cbb 8415 pat = GEN_FCN (icode) (target);
0ac081f6
AH
8416 if (! pat)
8417 return 0;
8418 emit_insn (pat);
95385cbb
AH
8419 return target;
8420
8421 case ALTIVEC_BUILTIN_MTVSCR:
8422 icode = CODE_FOR_altivec_mtvscr;
5039610b 8423 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8424 op0 = expand_normal (arg0);
95385cbb
AH
8425 mode0 = insn_data[icode].operand[0].mode;
8426
8427 /* If we got invalid arguments, bail out before generating bad rtl. */
8428 if (arg0 == error_mark_node)
9a171fcd 8429 return const0_rtx;
95385cbb
AH
8430
8431 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8432 op0 = copy_to_mode_reg (mode0, op0);
8433
8434 pat = GEN_FCN (icode) (op0);
8435 if (pat)
8436 emit_insn (pat);
8437 return NULL_RTX;
3a9b8c7e 8438
95385cbb
AH
8439 case ALTIVEC_BUILTIN_DSSALL:
8440 emit_insn (gen_altivec_dssall ());
8441 return NULL_RTX;
8442
8443 case ALTIVEC_BUILTIN_DSS:
8444 icode = CODE_FOR_altivec_dss;
5039610b 8445 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8446 STRIP_NOPS (arg0);
84217346 8447 op0 = expand_normal (arg0);
95385cbb
AH
8448 mode0 = insn_data[icode].operand[0].mode;
8449
8450 /* If we got invalid arguments, bail out before generating bad rtl. */
8451 if (arg0 == error_mark_node)
9a171fcd 8452 return const0_rtx;
95385cbb 8453
b44140e7
AH
8454 if (TREE_CODE (arg0) != INTEGER_CST
8455 || TREE_INT_CST_LOW (arg0) & ~0x3)
8456 {
8457 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8458 return const0_rtx;
b44140e7
AH
8459 }
8460
95385cbb
AH
8461 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8462 op0 = copy_to_mode_reg (mode0, op0);
8463
8464 emit_insn (gen_altivec_dss (op0));
0ac081f6 8465 return NULL_RTX;
7a4eca66
DE
8466
8467 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8468 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8469 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8470 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8471 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8472
8473 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8474 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8475 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8476 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8477 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8478
8479 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8480 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8481 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8482 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8483 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8484
8485 default:
8486 break;
8487 /* Fall through to the code after the switch. */
0ac081f6 8488 }
24408032 8489
100c4561 8490 /* Expand abs* operations. */
586de218 8491 d = bdesc_abs;
ca7558fc 8492 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8493 if (d->code == fcode)
5039610b 8494 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8495
ae4b4a02 8496 /* Expand the AltiVec predicates. */
586de218 8497 dp = bdesc_altivec_preds;
ca7558fc 8498 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8499 if (dp->code == fcode)
c4ad648e 8500 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8501 exp, target);
ae4b4a02 8502
6525c0e7
AH
8503 /* LV* are funky. We initialized them differently. */
8504 switch (fcode)
8505 {
8506 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8507 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5039610b 8508 exp, target);
6525c0e7 8509 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8510 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5039610b 8511 exp, target);
6525c0e7 8512 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8513 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5039610b 8514 exp, target);
6525c0e7 8515 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8516 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5039610b 8517 exp, target);
6525c0e7 8518 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8519 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5039610b 8520 exp, target);
6525c0e7 8521 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8522 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5039610b 8523 exp, target);
6525c0e7 8524 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8525 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5039610b 8526 exp, target);
6525c0e7
AH
8527 default:
8528 break;
8529 /* Fall through to the code after the switch. */
8530 }
95385cbb 8531
92898235 8532 *expandedp = false;
0ac081f6
AH
8533 return NULL_RTX;
8534}
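Builtin expansion here is layered: each helper either handles the builtin and sets *expandedp, or declines so the caller can try the next family (loads, stores, dst, then the table-driven cases). A minimal standalone sketch of that protocol, with invented codes and result values; none of the demo_* functions exist in rs6000.c:

#include <stdio.h>
#include <stdbool.h>

/* Each demo expander handles one builtin code; on success it sets
   *expandedp and returns a result, otherwise it leaves the decision
   to the next expander in line.  */
static int
demo_expand_ld (int fcode, bool *expandedp)
{
  *expandedp = (fcode == 1);
  return *expandedp ? 100 : 0;
}

static int
demo_expand_st (int fcode, bool *expandedp)
{
  *expandedp = (fcode == 2);
  return *expandedp ? 200 : 0;
}

static int
demo_expand_builtin (int fcode, bool *expandedp)
{
  int result;

  result = demo_expand_ld (fcode, expandedp);
  if (*expandedp)
    return result;

  result = demo_expand_st (fcode, expandedp);
  if (*expandedp)
    return result;

  *expandedp = false;
  return 0;
}

int
main (void)
{
  bool expanded;
  int r = demo_expand_builtin (2, &expanded);

  printf ("expanded=%d result=%d\n", (int) expanded, r);
  return 0;
}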
8535
96038623
DE
8536/* Expand the builtin in EXP and store the result in TARGET. Store
8537 true in *EXPANDEDP if we found a builtin to expand. */
8538static rtx
8539paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8540{
8541 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8542 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 8543 const struct builtin_description *d;
96038623
DE
8544 size_t i;
8545
8546 *expandedp = true;
8547
8548 switch (fcode)
8549 {
8550 case PAIRED_BUILTIN_STX:
8551 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8552 case PAIRED_BUILTIN_LX:
8553 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8554 default:
8555 break;
8556 /* Fall through to the code after the switch. */
8557 }
8558
8559 /* Expand the paired predicates. */
23a651fc 8560 d = bdesc_paired_preds;
96038623
DE
8561 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8562 if (d->code == fcode)
8563 return paired_expand_predicate_builtin (d->icode, exp, target);
8564
8565 *expandedp = false;
8566 return NULL_RTX;
8567}
8568
a3170dc6
AH
8569/* Binops that need to be initialized manually, but can be expanded
8570 automagically by rs6000_expand_binop_builtin. */
8571static struct builtin_description bdesc_2arg_spe[] =
8572{
8573 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8574 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8575 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8576 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8577 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8578 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8579 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8580 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8581 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8582 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8583 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8584 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8585 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8586 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8587 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8588 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8589 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8590 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8591 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8592 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8593 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8594 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8595};
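/* For instance, with the table above a call such as

     __ev64_opaque__ v = __builtin_spe_evlddx (p, i);

   (p being an __ev64_opaque__ * and i an int) is routed through
   rs6000_expand_binop_builtin via the CODE_FOR_spe_evlddx entry.
   (Usage sketch only; the v2si_ftype_pv2si_int signature is registered
   in spe_init_builtins.)  */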
8596
8597/* Expand the builtin in EXP and store the result in TARGET. Store
8598 true in *EXPANDEDP if we found a builtin to expand.
8599
8600 This expands the SPE builtins that are not simple unary and binary
8601 operations. */
8602static rtx
a2369ed3 8603spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8604{
5039610b 8605 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8606 tree arg1, arg0;
8607 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8608 enum insn_code icode;
8609 enum machine_mode tmode, mode0;
8610 rtx pat, op0;
8611 struct builtin_description *d;
8612 size_t i;
8613
8614 *expandedp = true;
8615
8616 /* Syntax check for a 5-bit unsigned immediate. */
8617 switch (fcode)
8618 {
8619 case SPE_BUILTIN_EVSTDD:
8620 case SPE_BUILTIN_EVSTDH:
8621 case SPE_BUILTIN_EVSTDW:
8622 case SPE_BUILTIN_EVSTWHE:
8623 case SPE_BUILTIN_EVSTWHO:
8624 case SPE_BUILTIN_EVSTWWE:
8625 case SPE_BUILTIN_EVSTWWO:
5039610b 8626 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8627 if (TREE_CODE (arg1) != INTEGER_CST
8628 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8629 {
8630 error ("argument 2 must be a 5-bit unsigned literal");
8631 return const0_rtx;
8632 }
8633 break;
8634 default:
8635 break;
8636 }
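  /* Concretely, the literal operand checked above must lie in 0..31:
     __builtin_spe_evstdd (v, p, 8) is accepted, while passing a
     non-constant or an out-of-range value draws the error just above.
     (Usage sketch; the builtin signatures are registered in
     spe_init_builtins.)  */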
8637
00332c9f
AH
8638 /* The evsplat*i instructions are not quite generic. */
8639 switch (fcode)
8640 {
8641 case SPE_BUILTIN_EVSPLATFI:
8642 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8643 exp, target);
00332c9f
AH
8644 case SPE_BUILTIN_EVSPLATI:
8645 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8646 exp, target);
00332c9f
AH
8647 default:
8648 break;
8649 }
8650
a3170dc6
AH
8651 d = (struct builtin_description *) bdesc_2arg_spe;
8652 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8653 if (d->code == fcode)
5039610b 8654 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8655
8656 d = (struct builtin_description *) bdesc_spe_predicates;
8657 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8658 if (d->code == fcode)
5039610b 8659 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8660
8661 d = (struct builtin_description *) bdesc_spe_evsel;
8662 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8663 if (d->code == fcode)
5039610b 8664 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8665
8666 switch (fcode)
8667 {
8668 case SPE_BUILTIN_EVSTDDX:
5039610b 8669 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8670 case SPE_BUILTIN_EVSTDHX:
5039610b 8671 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8672 case SPE_BUILTIN_EVSTDWX:
5039610b 8673 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8674 case SPE_BUILTIN_EVSTWHEX:
5039610b 8675 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8676 case SPE_BUILTIN_EVSTWHOX:
5039610b 8677 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8678 case SPE_BUILTIN_EVSTWWEX:
5039610b 8679 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8680 case SPE_BUILTIN_EVSTWWOX:
5039610b 8681 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8682 case SPE_BUILTIN_EVSTDD:
5039610b 8683 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8684 case SPE_BUILTIN_EVSTDH:
5039610b 8685 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8686 case SPE_BUILTIN_EVSTDW:
5039610b 8687 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8688 case SPE_BUILTIN_EVSTWHE:
5039610b 8689 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8690 case SPE_BUILTIN_EVSTWHO:
5039610b 8691 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8692 case SPE_BUILTIN_EVSTWWE:
5039610b 8693 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8694 case SPE_BUILTIN_EVSTWWO:
5039610b 8695 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8696 case SPE_BUILTIN_MFSPEFSCR:
8697 icode = CODE_FOR_spe_mfspefscr;
8698 tmode = insn_data[icode].operand[0].mode;
8699
8700 if (target == 0
8701 || GET_MODE (target) != tmode
8702 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8703 target = gen_reg_rtx (tmode);
f676971a 8704
a3170dc6
AH
8705 pat = GEN_FCN (icode) (target);
8706 if (! pat)
8707 return 0;
8708 emit_insn (pat);
8709 return target;
8710 case SPE_BUILTIN_MTSPEFSCR:
8711 icode = CODE_FOR_spe_mtspefscr;
5039610b 8712 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8713 op0 = expand_normal (arg0);
a3170dc6
AH
8714 mode0 = insn_data[icode].operand[0].mode;
8715
8716 if (arg0 == error_mark_node)
8717 return const0_rtx;
8718
8719 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8720 op0 = copy_to_mode_reg (mode0, op0);
8721
8722 pat = GEN_FCN (icode) (op0);
8723 if (pat)
8724 emit_insn (pat);
8725 return NULL_RTX;
8726 default:
8727 break;
8728 }
8729
8730 *expandedp = false;
8731 return NULL_RTX;
8732}
8733
96038623
DE
8734static rtx
8735paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
8736{
8737 rtx pat, scratch, tmp;
8738 tree form = CALL_EXPR_ARG (exp, 0);
8739 tree arg0 = CALL_EXPR_ARG (exp, 1);
8740 tree arg1 = CALL_EXPR_ARG (exp, 2);
8741 rtx op0 = expand_normal (arg0);
8742 rtx op1 = expand_normal (arg1);
8743 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8744 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8745 int form_int;
8746 enum rtx_code code;
8747
8748 if (TREE_CODE (form) != INTEGER_CST)
8749 {
8750 error ("argument 1 of __builtin_paired_predicate must be a constant");
8751 return const0_rtx;
8752 }
8753 else
8754 form_int = TREE_INT_CST_LOW (form);
8755
8756 gcc_assert (mode0 == mode1);
8757
8758 if (arg0 == error_mark_node || arg1 == error_mark_node)
8759 return const0_rtx;
8760
8761 if (target == 0
8762 || GET_MODE (target) != SImode
8763 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
8764 target = gen_reg_rtx (SImode);
8765 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
8766 op0 = copy_to_mode_reg (mode0, op0);
8767 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
8768 op1 = copy_to_mode_reg (mode1, op1);
8769
8770 scratch = gen_reg_rtx (CCFPmode);
8771
8772 pat = GEN_FCN (icode) (scratch, op0, op1);
8773 if (!pat)
8774 return const0_rtx;
8775
8776 emit_insn (pat);
8777
8778 switch (form_int)
8779 {
8780 /* LT bit. */
8781 case 0:
8782 code = LT;
8783 break;
8784 /* GT bit. */
8785 case 1:
8786 code = GT;
8787 break;
8788 /* EQ bit. */
8789 case 2:
8790 code = EQ;
8791 break;
8792 /* UN bit. */
8793 case 3:
8794 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8795 return target;
8796 default:
8797 error ("argument 1 of __builtin_paired_predicate is out of range");
8798 return const0_rtx;
8799 }
8800
8801 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8802 emit_move_insn (target, tmp);
8803 return target;
8804}
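/* To recap the mapping implemented above: a paired predicate builtin is
   called as (form, a, b), where form 0..3 selects the LT, GT, EQ or UN
   bit of the CCFP comparison.  The concrete builtin names come from
   bdesc_paired_preds, defined elsewhere in this file.  */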
8805
a3170dc6 8806static rtx
5039610b 8807spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8808{
8809 rtx pat, scratch, tmp;
5039610b
SL
8810 tree form = CALL_EXPR_ARG (exp, 0);
8811 tree arg0 = CALL_EXPR_ARG (exp, 1);
8812 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8813 rtx op0 = expand_normal (arg0);
8814 rtx op1 = expand_normal (arg1);
a3170dc6
AH
8815 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8816 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8817 int form_int;
8818 enum rtx_code code;
8819
8820 if (TREE_CODE (form) != INTEGER_CST)
8821 {
8822 error ("argument 1 of __builtin_spe_predicate must be a constant");
8823 return const0_rtx;
8824 }
8825 else
8826 form_int = TREE_INT_CST_LOW (form);
8827
37409796 8828 gcc_assert (mode0 == mode1);
a3170dc6
AH
8829
8830 if (arg0 == error_mark_node || arg1 == error_mark_node)
8831 return const0_rtx;
8832
8833 if (target == 0
8834 || GET_MODE (target) != SImode
8835 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
8836 target = gen_reg_rtx (SImode);
8837
8838 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8839 op0 = copy_to_mode_reg (mode0, op0);
8840 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8841 op1 = copy_to_mode_reg (mode1, op1);
8842
8843 scratch = gen_reg_rtx (CCmode);
8844
8845 pat = GEN_FCN (icode) (scratch, op0, op1);
8846 if (! pat)
8847 return const0_rtx;
8848 emit_insn (pat);
8849
8850 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
8851 _lower_. We use one compare, but look in different bits of the
8852 CR for each variant.
8853
8854 There are 2 elements in each SPE simd type (upper/lower). The CR
8855 bits are set as follows:
8856
8857 BIT 0 | BIT 1 | BIT 2 | BIT 3
8858 U | L | (U | L) | (U & L)
8859
8860 So, for an "all" relationship, BIT 3 would be set.
8861 For an "any" relationship, BIT 2 would be set. Etc.
8862
8863 Following traditional nomenclature, these bits map to:
8864
8865 BIT 0 | BIT 1 | BIT 2 | BIT 3
8866 LT | GT | EQ | OV
8867
8868 Later, we will generate rtl to look in the LT/GT/EQ/OV bits.
8869 */
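  /* For example (the builtin spelling here is only illustrative; the
     real names come from bdesc_spe_predicates):

       int all_gt = __builtin_spe_evcmpgts (0, a, b);
       int any_gt = __builtin_spe_evcmpgts (1, a, b);

     form 0 tests the "all" (OV) bit and form 1 the "any" (EQ) bit, per
     the switch below.  */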
8870
8871 switch (form_int)
8872 {
8873 /* All variant. OV bit. */
8874 case 0:
8875 /* We need to get to the OV bit, which is the ORDERED bit. We
8876 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 8877 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
8878 So let's just use another pattern. */
8879 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8880 return target;
8881 /* Any variant. EQ bit. */
8882 case 1:
8883 code = EQ;
8884 break;
8885 /* Upper variant. LT bit. */
8886 case 2:
8887 code = LT;
8888 break;
8889 /* Lower variant. GT bit. */
8890 case 3:
8891 code = GT;
8892 break;
8893 default:
8894 error ("argument 1 of __builtin_spe_predicate is out of range");
8895 return const0_rtx;
8896 }
8897
8898 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8899 emit_move_insn (target, tmp);
8900
8901 return target;
8902}
8903
8904/* The evsel builtins look like this:
8905
8906 e = __builtin_spe_evsel_OP (a, b, c, d);
8907
8908 and work like this:
8909
8910 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
8911 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
8912*/
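/* For example (OP spelling illustrative; the real names come from
   bdesc_spe_evsel):

     e = __builtin_spe_evsel_gts (a, b, c, d);

   would pick c[i] where a[i] > b[i] and d[i] otherwise, independently
   for the upper and lower elements.  */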
8913
8914static rtx
5039610b 8915spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8916{
8917 rtx pat, scratch;
5039610b
SL
8918 tree arg0 = CALL_EXPR_ARG (exp, 0);
8919 tree arg1 = CALL_EXPR_ARG (exp, 1);
8920 tree arg2 = CALL_EXPR_ARG (exp, 2);
8921 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
8922 rtx op0 = expand_normal (arg0);
8923 rtx op1 = expand_normal (arg1);
8924 rtx op2 = expand_normal (arg2);
8925 rtx op3 = expand_normal (arg3);
a3170dc6
AH
8926 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8927 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8928
37409796 8929 gcc_assert (mode0 == mode1);
a3170dc6
AH
8930
8931 if (arg0 == error_mark_node || arg1 == error_mark_node
8932 || arg2 == error_mark_node || arg3 == error_mark_node)
8933 return const0_rtx;
8934
8935 if (target == 0
8936 || GET_MODE (target) != mode0
8937 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
8938 target = gen_reg_rtx (mode0);
8939
8940 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8941 op0 = copy_to_mode_reg (mode0, op0);
8942 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8943 op1 = copy_to_mode_reg (mode0, op1);
8944 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8945 op2 = copy_to_mode_reg (mode0, op2);
8946 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
8947 op3 = copy_to_mode_reg (mode0, op3);
8948
8949 /* Generate the compare. */
8950 scratch = gen_reg_rtx (CCmode);
8951 pat = GEN_FCN (icode) (scratch, op0, op1);
8952 if (! pat)
8953 return const0_rtx;
8954 emit_insn (pat);
8955
8956 if (mode0 == V2SImode)
8957 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
8958 else
8959 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
8960
8961 return target;
8962}
8963
0ac081f6
AH
8964/* Expand an expression EXP that calls a built-in function,
8965 with result going to TARGET if that's convenient
8966 (and in mode MODE if that's convenient).
8967 SUBTARGET may be used as the target for computing one of EXP's operands.
8968 IGNORE is nonzero if the value is to be ignored. */
8969
8970static rtx
a2369ed3 8971rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
8972 enum machine_mode mode ATTRIBUTE_UNUSED,
8973 int ignore ATTRIBUTE_UNUSED)
0ac081f6 8974{
5039610b 8975 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 8976 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 8977 const struct builtin_description *d;
92898235
AH
8978 size_t i;
8979 rtx ret;
8980 bool success;
f676971a 8981
9c78b944
DE
8982 if (fcode == RS6000_BUILTIN_RECIP)
8983 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
8984
8985 if (fcode == RS6000_BUILTIN_RECIPF)
8986 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
8987
8988 if (fcode == RS6000_BUILTIN_RSQRTF)
8989 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
8990
7ccf35ed
DN
8991 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
8992 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8993 {
8994 int icode = (int) CODE_FOR_altivec_lvsr;
8995 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8996 enum machine_mode mode = insn_data[icode].operand[1].mode;
8997 tree arg;
8998 rtx op, addr, pat;
8999
37409796 9000 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 9001
5039610b 9002 arg = CALL_EXPR_ARG (exp, 0);
37409796 9003 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
9004 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
9005 addr = memory_address (mode, op);
9006 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9007 op = addr;
9008 else
9009 {
9010 /* For the load case need to negate the address. */
9011 op = gen_reg_rtx (GET_MODE (addr));
9012 emit_insn (gen_rtx_SET (VOIDmode, op,
9013 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 9014 }
7ccf35ed
DN
9015 op = gen_rtx_MEM (mode, op);
9016
9017 if (target == 0
9018 || GET_MODE (target) != tmode
9019 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9020 target = gen_reg_rtx (tmode);
9021
9022 /*pat = gen_altivec_lvsr (target, op);*/
9023 pat = GEN_FCN (icode) (target, op);
9024 if (!pat)
9025 return 0;
9026 emit_insn (pat);
9027
9028 return target;
9029 }
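  /* Explanatory note: the mask built above presumably feeds the
     vectorizer's realignment scheme (targetm.vectorize.builtin_mask_for_load),
     where it serves as the permute control for a vperm over two aligned
     loads; negating the address yields the control vector needed for the
     load flavor of that scheme.  */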
5039610b
SL
9030
9031 /* FIXME: There's got to be a nicer way to handle this case than
9032 constructing a new CALL_EXPR. */
f57d17f1
TM
9033 if (fcode == ALTIVEC_BUILTIN_VCFUX
9034 || fcode == ALTIVEC_BUILTIN_VCFSX)
9035 {
5039610b
SL
9036 if (call_expr_nargs (exp) == 1)
9037 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
9038 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 9039 }
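  /* That is, a VCFUX/VCFSX call written with a single operand is rebuilt
     here with integer_zero_node as its second operand, so (presumably via
     the vec_ctf overloads) vec_ctf (v) behaves like vec_ctf (v, 0).  */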
7ccf35ed 9040
0ac081f6 9041 if (TARGET_ALTIVEC)
92898235
AH
9042 {
9043 ret = altivec_expand_builtin (exp, target, &success);
9044
a3170dc6
AH
9045 if (success)
9046 return ret;
9047 }
9048 if (TARGET_SPE)
9049 {
9050 ret = spe_expand_builtin (exp, target, &success);
9051
92898235
AH
9052 if (success)
9053 return ret;
9054 }
96038623
DE
9055 if (TARGET_PAIRED_FLOAT)
9056 {
9057 ret = paired_expand_builtin (exp, target, &success);
9058
9059 if (success)
9060 return ret;
9061 }
92898235 9062
96038623 9063 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 9064
37409796
NS
9065 /* Handle simple unary operations. */
9066 d = (struct builtin_description *) bdesc_1arg;
9067 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9068 if (d->code == fcode)
5039610b 9069 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 9070
37409796
NS
9071 /* Handle simple binary operations. */
9072 d = (struct builtin_description *) bdesc_2arg;
9073 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9074 if (d->code == fcode)
5039610b 9075 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 9076
37409796 9077 /* Handle simple ternary operations. */
586de218 9078 d = bdesc_3arg;
37409796
NS
9079 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
9080 if (d->code == fcode)
5039610b 9081 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 9082
37409796 9083 gcc_unreachable ();
0ac081f6
AH
9084}
9085
7c62e993
PB
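/* Return a vector type with NUNITS elements of NODE, first giving the
   element type a fresh main variant so that the resulting vector type
   stays distinct from the ordinary vector types built from NODE.  */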
9086static tree
9087build_opaque_vector_type (tree node, int nunits)
9088{
9089 node = copy_node (node);
9090 TYPE_MAIN_VARIANT (node) = node;
9091 return build_vector_type (node, nunits);
9092}
9093
0ac081f6 9094static void
863d938c 9095rs6000_init_builtins (void)
0ac081f6 9096{
4a5eab38
PB
9097 V2SI_type_node = build_vector_type (intSI_type_node, 2);
9098 V2SF_type_node = build_vector_type (float_type_node, 2);
9099 V4HI_type_node = build_vector_type (intHI_type_node, 4);
9100 V4SI_type_node = build_vector_type (intSI_type_node, 4);
9101 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 9102 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
9103 V16QI_type_node = build_vector_type (intQI_type_node, 16);
9104
9105 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
9106 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
9107 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
9108
7c62e993
PB
9109 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
9110 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 9111 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 9112 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 9113
8bb418a3
ZL
9114 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
9115 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
9116 'vector unsigned short'. */
9117
8dd16ecc
NS
9118 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
9119 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
9120 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
9121 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 9122
58646b77
PB
9123 long_integer_type_internal_node = long_integer_type_node;
9124 long_unsigned_type_internal_node = long_unsigned_type_node;
9125 intQI_type_internal_node = intQI_type_node;
9126 uintQI_type_internal_node = unsigned_intQI_type_node;
9127 intHI_type_internal_node = intHI_type_node;
9128 uintHI_type_internal_node = unsigned_intHI_type_node;
9129 intSI_type_internal_node = intSI_type_node;
9130 uintSI_type_internal_node = unsigned_intSI_type_node;
9131 float_type_internal_node = float_type_node;
9132 void_type_internal_node = void_type_node;
9133
8bb418a3
ZL
9134 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9135 get_identifier ("__bool char"),
9136 bool_char_type_node));
9137 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9138 get_identifier ("__bool short"),
9139 bool_short_type_node));
9140 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9141 get_identifier ("__bool int"),
9142 bool_int_type_node));
9143 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9144 get_identifier ("__pixel"),
9145 pixel_type_node));
9146
4a5eab38
PB
9147 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
9148 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
9149 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
9150 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
9151
9152 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9153 get_identifier ("__vector unsigned char"),
9154 unsigned_V16QI_type_node));
9155 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9156 get_identifier ("__vector signed char"),
9157 V16QI_type_node));
9158 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9159 get_identifier ("__vector __bool char"),
9160 bool_V16QI_type_node));
9161
9162 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9163 get_identifier ("__vector unsigned short"),
9164 unsigned_V8HI_type_node));
9165 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9166 get_identifier ("__vector signed short"),
9167 V8HI_type_node));
9168 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9169 get_identifier ("__vector __bool short"),
9170 bool_V8HI_type_node));
9171
9172 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9173 get_identifier ("__vector unsigned int"),
9174 unsigned_V4SI_type_node));
9175 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9176 get_identifier ("__vector signed int"),
9177 V4SI_type_node));
9178 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9179 get_identifier ("__vector __bool int"),
9180 bool_V4SI_type_node));
9181
9182 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9183 get_identifier ("__vector float"),
9184 V4SF_type_node));
9185 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9186 get_identifier ("__vector __pixel"),
9187 pixel_V8HI_type_node));
9188
96038623
DE
9189 if (TARGET_PAIRED_FLOAT)
9190 paired_init_builtins ();
a3170dc6 9191 if (TARGET_SPE)
3fdaa45a 9192 spe_init_builtins ();
0ac081f6
AH
9193 if (TARGET_ALTIVEC)
9194 altivec_init_builtins ();
96038623 9195 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 9196 rs6000_common_init_builtins ();
9c78b944
DE
9197 if (TARGET_PPC_GFXOPT)
9198 {
9199 tree ftype = build_function_type_list (float_type_node,
9200 float_type_node,
9201 float_type_node,
9202 NULL_TREE);
9203 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9204 RS6000_BUILTIN_RECIPF);
9205
9206 ftype = build_function_type_list (float_type_node,
9207 float_type_node,
9208 NULL_TREE);
9209 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9210 RS6000_BUILTIN_RSQRTF);
9211 }
9212 if (TARGET_POPCNTB)
9213 {
9214 tree ftype = build_function_type_list (double_type_node,
9215 double_type_node,
9216 double_type_node,
9217 NULL_TREE);
9218 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9219 RS6000_BUILTIN_RECIP);
9220
9221 }
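  /* Usage sketch for the builtins defined just above (names and
     signatures as registered here; the expansions are the
     reciprocal-estimate based patterns selected in rs6000_expand_builtin):

       float  q = __builtin_recipdivf (x, y);
       float  r = __builtin_rsqrtf (x);
       double d = __builtin_recipdiv (dx, dy);  */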
69ca3549
DE
9222
9223#if TARGET_XCOFF
9224 /* AIX libm provides clog as __clog. */
9225 if (built_in_decls [BUILT_IN_CLOG])
9226 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9227#endif
fb220235
FXC
9228
9229#ifdef SUBTARGET_INIT_BUILTINS
9230 SUBTARGET_INIT_BUILTINS;
9231#endif
0ac081f6
AH
9232}
9233
a3170dc6
AH
9234/* Search through a set of builtins and enable the mask bits.
9235 DESC is an array of builtins.
b6d08ca1 9236 SIZE is the total number of builtins.
a3170dc6
AH
9237 START is the builtin enum at which to start.
9238 END is the builtin enum at which to end. */
0ac081f6 9239static void
a2369ed3 9240enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 9241 enum rs6000_builtins start,
a2369ed3 9242 enum rs6000_builtins end)
a3170dc6
AH
9243{
9244 int i;
9245
9246 for (i = 0; i < size; ++i)
9247 if (desc[i].code == start)
9248 break;
9249
9250 if (i == size)
9251 return;
9252
9253 for (; i < size; ++i)
9254 {
9255 /* Flip all the bits on. */
9256 desc[i].mask = target_flags;
9257 if (desc[i].code == end)
9258 break;
9259 }
9260}
9261
9262static void
863d938c 9263spe_init_builtins (void)
0ac081f6 9264{
a3170dc6
AH
9265 tree endlink = void_list_node;
9266 tree puint_type_node = build_pointer_type (unsigned_type_node);
9267 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9268 struct builtin_description *d;
0ac081f6
AH
9269 size_t i;
9270
a3170dc6
AH
9271 tree v2si_ftype_4_v2si
9272 = build_function_type
3fdaa45a
AH
9273 (opaque_V2SI_type_node,
9274 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9275 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9276 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9277 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9278 endlink)))));
9279
9280 tree v2sf_ftype_4_v2sf
9281 = build_function_type
3fdaa45a
AH
9282 (opaque_V2SF_type_node,
9283 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9284 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9285 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9286 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9287 endlink)))));
9288
9289 tree int_ftype_int_v2si_v2si
9290 = build_function_type
9291 (integer_type_node,
9292 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9293 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9294 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9295 endlink))));
9296
9297 tree int_ftype_int_v2sf_v2sf
9298 = build_function_type
9299 (integer_type_node,
9300 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9301 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9302 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9303 endlink))));
9304
9305 tree void_ftype_v2si_puint_int
9306 = build_function_type (void_type_node,
3fdaa45a 9307 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9308 tree_cons (NULL_TREE, puint_type_node,
9309 tree_cons (NULL_TREE,
9310 integer_type_node,
9311 endlink))));
9312
9313 tree void_ftype_v2si_puint_char
9314 = build_function_type (void_type_node,
3fdaa45a 9315 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9316 tree_cons (NULL_TREE, puint_type_node,
9317 tree_cons (NULL_TREE,
9318 char_type_node,
9319 endlink))));
9320
9321 tree void_ftype_v2si_pv2si_int
9322 = build_function_type (void_type_node,
3fdaa45a 9323 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9324 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9325 tree_cons (NULL_TREE,
9326 integer_type_node,
9327 endlink))));
9328
9329 tree void_ftype_v2si_pv2si_char
9330 = build_function_type (void_type_node,
3fdaa45a 9331 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9332 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9333 tree_cons (NULL_TREE,
9334 char_type_node,
9335 endlink))));
9336
9337 tree void_ftype_int
9338 = build_function_type (void_type_node,
9339 tree_cons (NULL_TREE, integer_type_node, endlink));
9340
9341 tree int_ftype_void
36e8d515 9342 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
9343
9344 tree v2si_ftype_pv2si_int
3fdaa45a 9345 = build_function_type (opaque_V2SI_type_node,
6035d635 9346 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9347 tree_cons (NULL_TREE, integer_type_node,
9348 endlink)));
9349
9350 tree v2si_ftype_puint_int
3fdaa45a 9351 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9352 tree_cons (NULL_TREE, puint_type_node,
9353 tree_cons (NULL_TREE, integer_type_node,
9354 endlink)));
9355
9356 tree v2si_ftype_pushort_int
3fdaa45a 9357 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9358 tree_cons (NULL_TREE, pushort_type_node,
9359 tree_cons (NULL_TREE, integer_type_node,
9360 endlink)));
9361
00332c9f
AH
9362 tree v2si_ftype_signed_char
9363 = build_function_type (opaque_V2SI_type_node,
9364 tree_cons (NULL_TREE, signed_char_type_node,
9365 endlink));
9366
a3170dc6
AH
9367 /* The initialization of the simple binary and unary builtins is
9368 done in rs6000_common_init_builtins, but we have to enable the
9369 mask bits here manually because we have run out of `target_flags'
9370 bits. We really need to redesign this mask business. */
9371
9372 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9373 ARRAY_SIZE (bdesc_2arg),
9374 SPE_BUILTIN_EVADDW,
9375 SPE_BUILTIN_EVXOR);
9376 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9377 ARRAY_SIZE (bdesc_1arg),
9378 SPE_BUILTIN_EVABS,
9379 SPE_BUILTIN_EVSUBFUSIAAW);
9380 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9381 ARRAY_SIZE (bdesc_spe_predicates),
9382 SPE_BUILTIN_EVCMPEQ,
9383 SPE_BUILTIN_EVFSTSTLT);
9384 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9385 ARRAY_SIZE (bdesc_spe_evsel),
9386 SPE_BUILTIN_EVSEL_CMPGTS,
9387 SPE_BUILTIN_EVSEL_FSTSTEQ);
9388
36252949
AH
9389 (*lang_hooks.decls.pushdecl)
9390 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9391 opaque_V2SI_type_node));
9392
a3170dc6 9393 /* Initialize irregular SPE builtins. */
f676971a 9394
a3170dc6
AH
9395 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9396 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9397 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9398 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9399 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9400 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9401 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9402 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9403 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9404 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9405 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9406 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9407 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9408 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9409 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9410 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
9411 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9412 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
9413
9414 /* Loads. */
9415 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9416 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9417 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9418 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9419 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9420 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9421 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9422 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9423 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9424 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9425 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9426 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9427 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9428 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9429 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9430 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9431 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9432 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9433 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9434 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9435 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9436 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9437
9438 /* Predicates. */
9439 d = (struct builtin_description *) bdesc_spe_predicates;
9440 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9441 {
9442 tree type;
9443
9444 switch (insn_data[d->icode].operand[1].mode)
9445 {
9446 case V2SImode:
9447 type = int_ftype_int_v2si_v2si;
9448 break;
9449 case V2SFmode:
9450 type = int_ftype_int_v2sf_v2sf;
9451 break;
9452 default:
37409796 9453 gcc_unreachable ();
a3170dc6
AH
9454 }
9455
9456 def_builtin (d->mask, d->name, type, d->code);
9457 }
9458
9459 /* Evsel predicates. */
9460 d = (struct builtin_description *) bdesc_spe_evsel;
9461 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9462 {
9463 tree type;
9464
9465 switch (insn_data[d->icode].operand[1].mode)
9466 {
9467 case V2SImode:
9468 type = v2si_ftype_4_v2si;
9469 break;
9470 case V2SFmode:
9471 type = v2sf_ftype_4_v2sf;
9472 break;
9473 default:
37409796 9474 gcc_unreachable ();
a3170dc6
AH
9475 }
9476
9477 def_builtin (d->mask, d->name, type, d->code);
9478 }
9479}
9480
96038623
DE
9481static void
9482paired_init_builtins (void)
9483{
23a651fc 9484 const struct builtin_description *d;
96038623
DE
9485 size_t i;
9486 tree endlink = void_list_node;
9487
9488 tree int_ftype_int_v2sf_v2sf
9489 = build_function_type
9490 (integer_type_node,
9491 tree_cons (NULL_TREE, integer_type_node,
9492 tree_cons (NULL_TREE, V2SF_type_node,
9493 tree_cons (NULL_TREE, V2SF_type_node,
9494 endlink))));
9495 tree pcfloat_type_node =
9496 build_pointer_type (build_qualified_type
9497 (float_type_node, TYPE_QUAL_CONST));
9498
9499 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9500 long_integer_type_node,
9501 pcfloat_type_node,
9502 NULL_TREE);
9503 tree void_ftype_v2sf_long_pcfloat =
9504 build_function_type_list (void_type_node,
9505 V2SF_type_node,
9506 long_integer_type_node,
9507 pcfloat_type_node,
9508 NULL_TREE);
9509
9510
9511 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9512 PAIRED_BUILTIN_LX);
9513
9514
9515 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9516 PAIRED_BUILTIN_STX);
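  /* Usage sketch only (argument order per the two signatures above):

       v = __builtin_paired_lx (0, src);
       __builtin_paired_stx (v, 0, dst);

     with v of the two-element float vector type (V2SF) and src/dst
     pointers to float.  */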
9517
9518 /* Predicates. */
23a651fc 9519 d = bdesc_paired_preds;
96038623
DE
9520 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9521 {
9522 tree type;
9523
9524 switch (insn_data[d->icode].operand[1].mode)
9525 {
9526 case V2SFmode:
9527 type = int_ftype_int_v2sf_v2sf;
9528 break;
9529 default:
9530 gcc_unreachable ();
9531 }
9532
9533 def_builtin (d->mask, d->name, type, d->code);
9534 }
9535}
9536
a3170dc6 9537static void
863d938c 9538altivec_init_builtins (void)
a3170dc6 9539{
586de218
KG
9540 const struct builtin_description *d;
9541 const struct builtin_description_predicates *dp;
a3170dc6 9542 size_t i;
7a4eca66
DE
9543 tree ftype;
9544
a3170dc6
AH
9545 tree pfloat_type_node = build_pointer_type (float_type_node);
9546 tree pint_type_node = build_pointer_type (integer_type_node);
9547 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9548 tree pchar_type_node = build_pointer_type (char_type_node);
9549
9550 tree pvoid_type_node = build_pointer_type (void_type_node);
9551
0dbc3651
ZW
9552 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9553 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9554 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9555 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9556
9557 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9558
58646b77
PB
9559 tree int_ftype_opaque
9560 = build_function_type_list (integer_type_node,
9561 opaque_V4SI_type_node, NULL_TREE);
9562
9563 tree opaque_ftype_opaque_int
9564 = build_function_type_list (opaque_V4SI_type_node,
9565 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9566 tree opaque_ftype_opaque_opaque_int
9567 = build_function_type_list (opaque_V4SI_type_node,
9568 opaque_V4SI_type_node, opaque_V4SI_type_node,
9569 integer_type_node, NULL_TREE);
9570 tree int_ftype_int_opaque_opaque
9571 = build_function_type_list (integer_type_node,
9572 integer_type_node, opaque_V4SI_type_node,
9573 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
9574 tree int_ftype_int_v4si_v4si
9575 = build_function_type_list (integer_type_node,
9576 integer_type_node, V4SI_type_node,
9577 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9578 tree v4sf_ftype_pcfloat
9579 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9580 tree void_ftype_pfloat_v4sf
b4de2f7d 9581 = build_function_type_list (void_type_node,
a3170dc6 9582 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
9583 tree v4si_ftype_pcint
9584 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9585 tree void_ftype_pint_v4si
b4de2f7d
AH
9586 = build_function_type_list (void_type_node,
9587 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9588 tree v8hi_ftype_pcshort
9589 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9590 tree void_ftype_pshort_v8hi
b4de2f7d
AH
9591 = build_function_type_list (void_type_node,
9592 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
9593 tree v16qi_ftype_pcchar
9594 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9595 tree void_ftype_pchar_v16qi
b4de2f7d
AH
9596 = build_function_type_list (void_type_node,
9597 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9598 tree void_ftype_v4si
b4de2f7d 9599 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9600 tree v8hi_ftype_void
9601 = build_function_type (V8HI_type_node, void_list_node);
9602 tree void_ftype_void
9603 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
9604 tree void_ftype_int
9605 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9606
58646b77
PB
9607 tree opaque_ftype_long_pcvoid
9608 = build_function_type_list (opaque_V4SI_type_node,
9609 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9610 tree v16qi_ftype_long_pcvoid
a3170dc6 9611 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
9612 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9613 tree v8hi_ftype_long_pcvoid
a3170dc6 9614 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
9615 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9616 tree v4si_ftype_long_pcvoid
a3170dc6 9617 = build_function_type_list (V4SI_type_node,
b4a62fa0 9618 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9619
58646b77
PB
9620 tree void_ftype_opaque_long_pvoid
9621 = build_function_type_list (void_type_node,
9622 opaque_V4SI_type_node, long_integer_type_node,
9623 pvoid_type_node, NULL_TREE);
b4a62fa0 9624 tree void_ftype_v4si_long_pvoid
b4de2f7d 9625 = build_function_type_list (void_type_node,
b4a62fa0 9626 V4SI_type_node, long_integer_type_node,
b4de2f7d 9627 pvoid_type_node, NULL_TREE);
b4a62fa0 9628 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9629 = build_function_type_list (void_type_node,
b4a62fa0 9630 V16QI_type_node, long_integer_type_node,
b4de2f7d 9631 pvoid_type_node, NULL_TREE);
b4a62fa0 9632 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9633 = build_function_type_list (void_type_node,
b4a62fa0 9634 V8HI_type_node, long_integer_type_node,
b4de2f7d 9635 pvoid_type_node, NULL_TREE);
a3170dc6
AH
9636 tree int_ftype_int_v8hi_v8hi
9637 = build_function_type_list (integer_type_node,
9638 integer_type_node, V8HI_type_node,
9639 V8HI_type_node, NULL_TREE);
9640 tree int_ftype_int_v16qi_v16qi
9641 = build_function_type_list (integer_type_node,
9642 integer_type_node, V16QI_type_node,
9643 V16QI_type_node, NULL_TREE);
9644 tree int_ftype_int_v4sf_v4sf
9645 = build_function_type_list (integer_type_node,
9646 integer_type_node, V4SF_type_node,
9647 V4SF_type_node, NULL_TREE);
9648 tree v4si_ftype_v4si
9649 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9650 tree v8hi_ftype_v8hi
9651 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9652 tree v16qi_ftype_v16qi
9653 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9654 tree v4sf_ftype_v4sf
9655 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9656 tree void_ftype_pcvoid_int_int
a3170dc6 9657 = build_function_type_list (void_type_node,
0dbc3651 9658 pcvoid_type_node, integer_type_node,
8bb418a3 9659 integer_type_node, NULL_TREE);
8bb418a3 9660
0dbc3651
ZW
9661 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9662 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9663 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9664 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9665 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9666 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9667 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9668 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9669 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9670 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9671 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9672 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9673 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9674 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9675 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9676 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
9677 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9678 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9679 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9680 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
9681 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9682 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9683 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9684 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9685 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9686 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9687 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9688 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9689 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9690 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9691 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9692 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
9693 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9694 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9695 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9696 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9697 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9698 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9699 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9700 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9701 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9702 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9703 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9704 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9705 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9706 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9707
9708 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
9709
9710 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
9711 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
9712 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
9713 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
9714 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
9715 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
9716 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
9717 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
9718 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
9719 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 9720
a3170dc6 9721 /* Add the DST variants. */
586de218 9722 d = bdesc_dst;
a3170dc6 9723 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 9724 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
9725
9726 /* Initialize the predicates. */
586de218 9727 dp = bdesc_altivec_preds;
a3170dc6
AH
9728 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
9729 {
9730 enum machine_mode mode1;
9731 tree type;
58646b77
PB
9732 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9733 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 9734
58646b77
PB
9735 if (is_overloaded)
9736 mode1 = VOIDmode;
9737 else
9738 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
9739
9740 switch (mode1)
9741 {
58646b77
PB
9742 case VOIDmode:
9743 type = int_ftype_int_opaque_opaque;
9744 break;
a3170dc6
AH
9745 case V4SImode:
9746 type = int_ftype_int_v4si_v4si;
9747 break;
9748 case V8HImode:
9749 type = int_ftype_int_v8hi_v8hi;
9750 break;
9751 case V16QImode:
9752 type = int_ftype_int_v16qi_v16qi;
9753 break;
9754 case V4SFmode:
9755 type = int_ftype_int_v4sf_v4sf;
9756 break;
9757 default:
37409796 9758 gcc_unreachable ();
a3170dc6 9759 }
f676971a 9760
a3170dc6
AH
9761 def_builtin (dp->mask, dp->name, type, dp->code);
9762 }
9763
9764 /* Initialize the abs* operators. */
586de218 9765 d = bdesc_abs;
a3170dc6
AH
9766 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
9767 {
9768 enum machine_mode mode0;
9769 tree type;
9770
9771 mode0 = insn_data[d->icode].operand[0].mode;
9772
9773 switch (mode0)
9774 {
9775 case V4SImode:
9776 type = v4si_ftype_v4si;
9777 break;
9778 case V8HImode:
9779 type = v8hi_ftype_v8hi;
9780 break;
9781 case V16QImode:
9782 type = v16qi_ftype_v16qi;
9783 break;
9784 case V4SFmode:
9785 type = v4sf_ftype_v4sf;
9786 break;
9787 default:
37409796 9788 gcc_unreachable ();
a3170dc6 9789 }
f676971a 9790
a3170dc6
AH
9791 def_builtin (d->mask, d->name, type, d->code);
9792 }
7ccf35ed 9793
13c62176
DN
9794 if (TARGET_ALTIVEC)
9795 {
9796 tree decl;
9797
9798 /* Initialize target builtin that implements
9799 targetm.vectorize.builtin_mask_for_load. */
9800
c79efc4d
RÁE
9801 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
9802 v16qi_ftype_long_pcvoid,
9803 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
9804 BUILT_IN_MD, NULL, NULL_TREE);
9805 TREE_READONLY (decl) = 1;
13c62176
DN
9806 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
9807 altivec_builtin_mask_for_load = decl;
13c62176 9808 }
7a4eca66
DE
9809
9810 /* Access to the vec_init patterns. */
9811 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
9812 integer_type_node, integer_type_node,
9813 integer_type_node, NULL_TREE);
9814 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
9815 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
9816
9817 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
9818 short_integer_type_node,
9819 short_integer_type_node,
9820 short_integer_type_node,
9821 short_integer_type_node,
9822 short_integer_type_node,
9823 short_integer_type_node,
9824 short_integer_type_node, NULL_TREE);
9825 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
9826 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
9827
9828 ftype = build_function_type_list (V16QI_type_node, char_type_node,
9829 char_type_node, char_type_node,
9830 char_type_node, char_type_node,
9831 char_type_node, char_type_node,
9832 char_type_node, char_type_node,
9833 char_type_node, char_type_node,
9834 char_type_node, char_type_node,
9835 char_type_node, char_type_node,
9836 char_type_node, NULL_TREE);
9837 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
9838 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
9839
9840 ftype = build_function_type_list (V4SF_type_node, float_type_node,
9841 float_type_node, float_type_node,
9842 float_type_node, NULL_TREE);
9843 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
9844 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
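  /* Usage sketch for the vec_init builtins above (element counts and
     scalar types follow the function types just built):

       vector signed int vi = __builtin_vec_init_v4si (1, 2, 3, 4);
       vector float      vf = __builtin_vec_init_v4sf (1.0f, 2.0f, 3.0f, 4.0f);  */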
9845
9846 /* Access to the vec_set patterns. */
9847 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
9848 intSI_type_node,
9849 integer_type_node, NULL_TREE);
9850 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
9851 ALTIVEC_BUILTIN_VEC_SET_V4SI);
9852
9853 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
9854 intHI_type_node,
9855 integer_type_node, NULL_TREE);
9856 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
9857 ALTIVEC_BUILTIN_VEC_SET_V8HI);
9858
9859 ftype = build_function_type_list (V16QI_type_node, V16QI_type_node,
9860 intQI_type_node,
9861 integer_type_node, NULL_TREE);
9862 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
9863 ALTIVEC_BUILTIN_VEC_SET_V16QI);
9864
9865 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
9866 float_type_node,
9867 integer_type_node, NULL_TREE);
9868 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
9869 ALTIVEC_BUILTIN_VEC_SET_V4SF);
9870
9871 /* Access to the vec_extract patterns. */
9872 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
9873 integer_type_node, NULL_TREE);
9874 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
9875 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
9876
9877 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
9878 integer_type_node, NULL_TREE);
9879 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
9880 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
9881
9882 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
9883 integer_type_node, NULL_TREE);
9884 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
9885 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
9886
9887 ftype = build_function_type_list (float_type_node, V4SF_type_node,
9888 integer_type_node, NULL_TREE);
9889 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
9890 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
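  /* Likewise for the element accessors above; judging from their
     signatures the trailing integer operand is the element number, e.g.

       vi = __builtin_vec_set_v4si (vi, 42, 3);
       int x = __builtin_vec_ext_v4si (vi, 3);

     (usage sketch only).  */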
a3170dc6
AH
9891}
9892
9893static void
863d938c 9894rs6000_common_init_builtins (void)
a3170dc6 9895{
586de218 9896 const struct builtin_description *d;
a3170dc6
AH
9897 size_t i;
9898
96038623
DE
9899 tree v2sf_ftype_v2sf_v2sf_v2sf
9900 = build_function_type_list (V2SF_type_node,
9901 V2SF_type_node, V2SF_type_node,
9902 V2SF_type_node, NULL_TREE);
9903
a3170dc6
AH
9904 tree v4sf_ftype_v4sf_v4sf_v16qi
9905 = build_function_type_list (V4SF_type_node,
9906 V4SF_type_node, V4SF_type_node,
9907 V16QI_type_node, NULL_TREE);
9908 tree v4si_ftype_v4si_v4si_v16qi
9909 = build_function_type_list (V4SI_type_node,
9910 V4SI_type_node, V4SI_type_node,
9911 V16QI_type_node, NULL_TREE);
9912 tree v8hi_ftype_v8hi_v8hi_v16qi
9913 = build_function_type_list (V8HI_type_node,
9914 V8HI_type_node, V8HI_type_node,
9915 V16QI_type_node, NULL_TREE);
9916 tree v16qi_ftype_v16qi_v16qi_v16qi
9917 = build_function_type_list (V16QI_type_node,
9918 V16QI_type_node, V16QI_type_node,
9919 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
9920 tree v4si_ftype_int
9921 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
9922 tree v8hi_ftype_int
9923 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
9924 tree v16qi_ftype_int
9925 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
9926 tree v8hi_ftype_v16qi
9927 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
9928 tree v4sf_ftype_v4sf
9929 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
9930
9931 tree v2si_ftype_v2si_v2si
2abe3e28
AH
9932 = build_function_type_list (opaque_V2SI_type_node,
9933 opaque_V2SI_type_node,
9934 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9935
96038623 9936 tree v2sf_ftype_v2sf_v2sf_spe
2abe3e28
AH
9937 = build_function_type_list (opaque_V2SF_type_node,
9938 opaque_V2SF_type_node,
9939 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 9940
96038623
DE
9941 tree v2sf_ftype_v2sf_v2sf
9942 = build_function_type_list (V2SF_type_node,
9943 V2SF_type_node,
9944 V2SF_type_node, NULL_TREE);
9945
9946
a3170dc6 9947 tree v2si_ftype_int_int
2abe3e28 9948 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9949 integer_type_node, integer_type_node,
9950 NULL_TREE);
9951
58646b77
PB
9952 tree opaque_ftype_opaque
9953 = build_function_type_list (opaque_V4SI_type_node,
9954 opaque_V4SI_type_node, NULL_TREE);
9955
a3170dc6 9956 tree v2si_ftype_v2si
2abe3e28
AH
9957 = build_function_type_list (opaque_V2SI_type_node,
9958 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9959
96038623 9960 tree v2sf_ftype_v2sf_spe
2abe3e28
AH
9961 = build_function_type_list (opaque_V2SF_type_node,
9962 opaque_V2SF_type_node, NULL_TREE);
f676971a 9963
96038623
DE
9964 tree v2sf_ftype_v2sf
9965 = build_function_type_list (V2SF_type_node,
9966 V2SF_type_node, NULL_TREE);
9967
a3170dc6 9968 tree v2sf_ftype_v2si
2abe3e28
AH
9969 = build_function_type_list (opaque_V2SF_type_node,
9970 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
9971
9972 tree v2si_ftype_v2sf
2abe3e28
AH
9973 = build_function_type_list (opaque_V2SI_type_node,
9974 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
9975
9976 tree v2si_ftype_v2si_char
2abe3e28
AH
9977 = build_function_type_list (opaque_V2SI_type_node,
9978 opaque_V2SI_type_node,
9979 char_type_node, NULL_TREE);
a3170dc6
AH
9980
9981 tree v2si_ftype_int_char
2abe3e28 9982 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9983 integer_type_node, char_type_node, NULL_TREE);
9984
9985 tree v2si_ftype_char
2abe3e28
AH
9986 = build_function_type_list (opaque_V2SI_type_node,
9987 char_type_node, NULL_TREE);
a3170dc6
AH
9988
9989 tree int_ftype_int_int
9990 = build_function_type_list (integer_type_node,
9991 integer_type_node, integer_type_node,
9992 NULL_TREE);
95385cbb 9993
58646b77
PB
9994 tree opaque_ftype_opaque_opaque
9995 = build_function_type_list (opaque_V4SI_type_node,
9996 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 9997 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
9998 = build_function_type_list (V4SI_type_node,
9999 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 10000 tree v4sf_ftype_v4si_int
b4de2f7d 10001 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
10002 V4SI_type_node, integer_type_node, NULL_TREE);
10003 tree v4si_ftype_v4sf_int
b4de2f7d 10004 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10005 V4SF_type_node, integer_type_node, NULL_TREE);
10006 tree v4si_ftype_v4si_int
b4de2f7d 10007 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10008 V4SI_type_node, integer_type_node, NULL_TREE);
10009 tree v8hi_ftype_v8hi_int
b4de2f7d 10010 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
10011 V8HI_type_node, integer_type_node, NULL_TREE);
10012 tree v16qi_ftype_v16qi_int
b4de2f7d 10013 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
10014 V16QI_type_node, integer_type_node, NULL_TREE);
10015 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
10016 = build_function_type_list (V16QI_type_node,
10017 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
10018 integer_type_node, NULL_TREE);
10019 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
10020 = build_function_type_list (V8HI_type_node,
10021 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
10022 integer_type_node, NULL_TREE);
10023 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
10024 = build_function_type_list (V4SI_type_node,
10025 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
10026 integer_type_node, NULL_TREE);
10027 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
10028 = build_function_type_list (V4SF_type_node,
10029 V4SF_type_node, V4SF_type_node,
b9e4e5d1 10030 integer_type_node, NULL_TREE);
0ac081f6 10031 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
10032 = build_function_type_list (V4SF_type_node,
10033 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
10034 tree opaque_ftype_opaque_opaque_opaque
10035 = build_function_type_list (opaque_V4SI_type_node,
10036 opaque_V4SI_type_node, opaque_V4SI_type_node,
10037 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 10038 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
10039 = build_function_type_list (V4SF_type_node,
10040 V4SF_type_node, V4SF_type_node,
10041 V4SI_type_node, NULL_TREE);
2212663f 10042 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
10043 = build_function_type_list (V4SF_type_node,
10044 V4SF_type_node, V4SF_type_node,
10045 V4SF_type_node, NULL_TREE);
f676971a 10046 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
10047 = build_function_type_list (V4SI_type_node,
10048 V4SI_type_node, V4SI_type_node,
10049 V4SI_type_node, NULL_TREE);
0ac081f6 10050 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
10051 = build_function_type_list (V8HI_type_node,
10052 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 10053 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
10054 = build_function_type_list (V8HI_type_node,
10055 V8HI_type_node, V8HI_type_node,
10056 V8HI_type_node, NULL_TREE);
c4ad648e 10057 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
10058 = build_function_type_list (V4SI_type_node,
10059 V8HI_type_node, V8HI_type_node,
10060 V4SI_type_node, NULL_TREE);
c4ad648e 10061 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
10062 = build_function_type_list (V4SI_type_node,
10063 V16QI_type_node, V16QI_type_node,
10064 V4SI_type_node, NULL_TREE);
0ac081f6 10065 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
10066 = build_function_type_list (V16QI_type_node,
10067 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10068 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
10069 = build_function_type_list (V4SI_type_node,
10070 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 10071 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
10072 = build_function_type_list (V8HI_type_node,
10073 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10074 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
10075 = build_function_type_list (V4SI_type_node,
10076 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10077 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
10078 = build_function_type_list (V8HI_type_node,
10079 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 10080 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
10081 = build_function_type_list (V16QI_type_node,
10082 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10083 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
10084 = build_function_type_list (V4SI_type_node,
10085 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 10086 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
10087 = build_function_type_list (V4SI_type_node,
10088 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10089 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
10090 = build_function_type_list (V4SI_type_node,
10091 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
10092 tree v4si_ftype_v8hi
10093 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
10094 tree int_ftype_v4si_v4si
10095 = build_function_type_list (integer_type_node,
10096 V4SI_type_node, V4SI_type_node, NULL_TREE);
10097 tree int_ftype_v4sf_v4sf
10098 = build_function_type_list (integer_type_node,
10099 V4SF_type_node, V4SF_type_node, NULL_TREE);
10100 tree int_ftype_v16qi_v16qi
10101 = build_function_type_list (integer_type_node,
10102 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10103 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
10104 = build_function_type_list (integer_type_node,
10105 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10106
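/* Illustrative note: the type nodes above follow the naming scheme
   RESULT_ftype_ARG1_ARG2..., so v4sf_ftype_v4sf_v4sf_v4sf corresponds to
   the AltiVec prototype "vector float f (vector float, vector float,
   vector float)".  The loops below pick one of these nodes for each entry
   of the bdesc_* tables, based only on the operand modes of its insn.  */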
6f317ef3 10107 /* Add the simple ternary operators. */
586de218 10108 d = bdesc_3arg;
ca7558fc 10109 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 10110 {
2212663f
DB
10111 enum machine_mode mode0, mode1, mode2, mode3;
10112 tree type;
58646b77
PB
10113 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10114 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 10115
58646b77
PB
10116 if (is_overloaded)
10117 {
10118 mode0 = VOIDmode;
10119 mode1 = VOIDmode;
10120 mode2 = VOIDmode;
10121 mode3 = VOIDmode;
10122 }
10123 else
10124 {
10125 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10126 continue;
f676971a 10127
58646b77
PB
10128 mode0 = insn_data[d->icode].operand[0].mode;
10129 mode1 = insn_data[d->icode].operand[1].mode;
10130 mode2 = insn_data[d->icode].operand[2].mode;
10131 mode3 = insn_data[d->icode].operand[3].mode;
10132 }
bb8df8a6 10133
2212663f
DB
10134 /* When all four are of the same mode. */
10135 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
10136 {
10137 switch (mode0)
10138 {
58646b77
PB
10139 case VOIDmode:
10140 type = opaque_ftype_opaque_opaque_opaque;
10141 break;
617e0e1d
DB
10142 case V4SImode:
10143 type = v4si_ftype_v4si_v4si_v4si;
10144 break;
2212663f
DB
10145 case V4SFmode:
10146 type = v4sf_ftype_v4sf_v4sf_v4sf;
10147 break;
10148 case V8HImode:
10149 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 10150 break;
2212663f
DB
10151 case V16QImode:
10152 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10153 break;
96038623
DE
10154 case V2SFmode:
10155 type = v2sf_ftype_v2sf_v2sf_v2sf;
10156 break;
2212663f 10157 default:
37409796 10158 gcc_unreachable ();
2212663f
DB
10159 }
10160 }
10161 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 10162 {
2212663f
DB
10163 switch (mode0)
10164 {
10165 case V4SImode:
10166 type = v4si_ftype_v4si_v4si_v16qi;
10167 break;
10168 case V4SFmode:
10169 type = v4sf_ftype_v4sf_v4sf_v16qi;
10170 break;
10171 case V8HImode:
10172 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 10173 break;
2212663f
DB
10174 case V16QImode:
10175 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10176 break;
2212663f 10177 default:
37409796 10178 gcc_unreachable ();
2212663f
DB
10179 }
10180 }
f676971a 10181 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 10182 && mode3 == V4SImode)
24408032 10183 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 10184 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 10185 && mode3 == V4SImode)
24408032 10186 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 10187 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 10188 && mode3 == V4SImode)
24408032
AH
10189 type = v4sf_ftype_v4sf_v4sf_v4si;
10190
a7b376ee 10191 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
10192 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10193 && mode3 == QImode)
b9e4e5d1 10194 type = v16qi_ftype_v16qi_v16qi_int;
24408032 10195
a7b376ee 10196 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
10197 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10198 && mode3 == QImode)
b9e4e5d1 10199 type = v8hi_ftype_v8hi_v8hi_int;
24408032 10200
a7b376ee 10201 /* vint, vint, vint, 4-bit literal. */
24408032
AH
10202 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10203 && mode3 == QImode)
b9e4e5d1 10204 type = v4si_ftype_v4si_v4si_int;
24408032 10205
a7b376ee 10206 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
10207 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10208 && mode3 == QImode)
b9e4e5d1 10209 type = v4sf_ftype_v4sf_v4sf_int;
24408032 10210
2212663f 10211 else
37409796 10212 gcc_unreachable ();
2212663f
DB
10213
10214 def_builtin (d->mask, d->name, type, d->code);
10215 }
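/* For illustration: a bdesc_3arg entry whose insn pattern has all four
   operands in V4SFmode (a vector float multiply-add, for example) takes
   the first branch above and is registered as v4sf_ftype_v4sf_v4sf_v4sf,
   while an overloaded entry has its modes forced to VOIDmode and always
   receives the fully opaque signature.  */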
10216
0ac081f6 10217 /* Add the simple binary operators. */
00b960c7 10218 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 10219 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
10220 {
10221 enum machine_mode mode0, mode1, mode2;
10222 tree type;
58646b77
PB
10223 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10224 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 10225
58646b77
PB
10226 if (is_overloaded)
10227 {
10228 mode0 = VOIDmode;
10229 mode1 = VOIDmode;
10230 mode2 = VOIDmode;
10231 }
10232 else
bb8df8a6 10233 {
58646b77
PB
10234 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10235 continue;
f676971a 10236
58646b77
PB
10237 mode0 = insn_data[d->icode].operand[0].mode;
10238 mode1 = insn_data[d->icode].operand[1].mode;
10239 mode2 = insn_data[d->icode].operand[2].mode;
10240 }
0ac081f6
AH
10241
10242 /* When all three operands are of the same mode. */
10243 if (mode0 == mode1 && mode1 == mode2)
10244 {
10245 switch (mode0)
10246 {
58646b77
PB
10247 case VOIDmode:
10248 type = opaque_ftype_opaque_opaque;
10249 break;
0ac081f6
AH
10250 case V4SFmode:
10251 type = v4sf_ftype_v4sf_v4sf;
10252 break;
10253 case V4SImode:
10254 type = v4si_ftype_v4si_v4si;
10255 break;
10256 case V16QImode:
10257 type = v16qi_ftype_v16qi_v16qi;
10258 break;
10259 case V8HImode:
10260 type = v8hi_ftype_v8hi_v8hi;
10261 break;
a3170dc6
AH
10262 case V2SImode:
10263 type = v2si_ftype_v2si_v2si;
10264 break;
96038623
DE
10265 case V2SFmode:
10266 if (TARGET_PAIRED_FLOAT)
10267 type = v2sf_ftype_v2sf_v2sf;
10268 else
10269 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
10270 break;
10271 case SImode:
10272 type = int_ftype_int_int;
10273 break;
0ac081f6 10274 default:
37409796 10275 gcc_unreachable ();
0ac081f6
AH
10276 }
10277 }
10278
10279 /* A few other combos we really don't want to do manually. */
10280
10281 /* vint, vfloat, vfloat. */
10282 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10283 type = v4si_ftype_v4sf_v4sf;
10284
10285 /* vshort, vchar, vchar. */
10286 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10287 type = v8hi_ftype_v16qi_v16qi;
10288
10289 /* vint, vshort, vshort. */
10290 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10291 type = v4si_ftype_v8hi_v8hi;
10292
10293 /* vshort, vint, vint. */
10294 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10295 type = v8hi_ftype_v4si_v4si;
10296
10297 /* vchar, vshort, vshort. */
10298 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10299 type = v16qi_ftype_v8hi_v8hi;
10300
10301 /* vint, vchar, vint. */
10302 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10303 type = v4si_ftype_v16qi_v4si;
10304
fa066a23
AH
10305 /* vint, vchar, vchar. */
10306 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10307 type = v4si_ftype_v16qi_v16qi;
10308
0ac081f6
AH
10309 /* vint, vshort, vint. */
10310 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10311 type = v4si_ftype_v8hi_v4si;
f676971a 10312
a7b376ee 10313 /* vint, vint, 5-bit literal. */
2212663f 10314 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10315 type = v4si_ftype_v4si_int;
f676971a 10316
a7b376ee 10317 /* vshort, vshort, 5-bit literal. */
2212663f 10318 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 10319 type = v8hi_ftype_v8hi_int;
f676971a 10320
a7b376ee 10321 /* vchar, vchar, 5-bit literal. */
2212663f 10322 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 10323 type = v16qi_ftype_v16qi_int;
0ac081f6 10324
a7b376ee 10325 /* vfloat, vint, 5-bit literal. */
617e0e1d 10326 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10327 type = v4sf_ftype_v4si_int;
f676971a 10328
a7b376ee 10329 /* vint, vfloat, 5-bit literal. */
617e0e1d 10330 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 10331 type = v4si_ftype_v4sf_int;
617e0e1d 10332
a3170dc6
AH
10333 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10334 type = v2si_ftype_int_int;
10335
10336 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10337 type = v2si_ftype_v2si_char;
10338
10339 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10340 type = v2si_ftype_int_char;
10341
37409796 10342 else
0ac081f6 10343 {
37409796
NS
10344 /* int, x, x. */
10345 gcc_assert (mode0 == SImode);
0ac081f6
AH
10346 switch (mode1)
10347 {
10348 case V4SImode:
10349 type = int_ftype_v4si_v4si;
10350 break;
10351 case V4SFmode:
10352 type = int_ftype_v4sf_v4sf;
10353 break;
10354 case V16QImode:
10355 type = int_ftype_v16qi_v16qi;
10356 break;
10357 case V8HImode:
10358 type = int_ftype_v8hi_v8hi;
10359 break;
10360 default:
37409796 10361 gcc_unreachable ();
0ac081f6
AH
10362 }
10363 }
10364
2212663f
DB
10365 def_builtin (d->mask, d->name, type, d->code);
10366 }
24408032 10367
2212663f
DB
10368 /* Add the simple unary operators. */
10369 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10370 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10371 {
10372 enum machine_mode mode0, mode1;
10373 tree type;
58646b77
PB
10374 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10375 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10376
10377 if (is_overloaded)
10378 {
10379 mode0 = VOIDmode;
10380 mode1 = VOIDmode;
10381 }
10382 else
10383 {
10384 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10385 continue;
bb8df8a6 10386
58646b77
PB
10387 mode0 = insn_data[d->icode].operand[0].mode;
10388 mode1 = insn_data[d->icode].operand[1].mode;
10389 }
2212663f
DB
10390
10391 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10392 type = v4si_ftype_int;
2212663f 10393 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10394 type = v8hi_ftype_int;
2212663f 10395 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10396 type = v16qi_ftype_int;
58646b77
PB
10397 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10398 type = opaque_ftype_opaque;
617e0e1d
DB
10399 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10400 type = v4sf_ftype_v4sf;
20e26713
AH
10401 else if (mode0 == V8HImode && mode1 == V16QImode)
10402 type = v8hi_ftype_v16qi;
10403 else if (mode0 == V4SImode && mode1 == V8HImode)
10404 type = v4si_ftype_v8hi;
a3170dc6
AH
10405 else if (mode0 == V2SImode && mode1 == V2SImode)
10406 type = v2si_ftype_v2si;
10407 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10408 {
10409 if (TARGET_PAIRED_FLOAT)
10410 type = v2sf_ftype_v2sf;
10411 else
10412 type = v2sf_ftype_v2sf_spe;
10413 }
a3170dc6
AH
10414 else if (mode0 == V2SFmode && mode1 == V2SImode)
10415 type = v2sf_ftype_v2si;
10416 else if (mode0 == V2SImode && mode1 == V2SFmode)
10417 type = v2si_ftype_v2sf;
10418 else if (mode0 == V2SImode && mode1 == QImode)
10419 type = v2si_ftype_char;
2212663f 10420 else
37409796 10421 gcc_unreachable ();
2212663f 10422
0ac081f6
AH
10423 def_builtin (d->mask, d->name, type, d->code);
10424 }
10425}
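/* Illustrative summary (based on the loops above): every entry is given a
   tree type derived purely from its insn operand modes, except that
   overloaded AltiVec builtins keep opaque vector signatures; the intent is
   that the language front end resolves their real argument types at each
   call site, which is why no mode information is recorded for them here.  */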
10426
c15c90bb
ZW
10427static void
10428rs6000_init_libfuncs (void)
10429{
602ea4d3
JJ
10430 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10431 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10432 {
602ea4d3
JJ
10433 /* AIX library routines for float->int conversion. */
10434 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10435 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10436 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10437 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10438 }
c15c90bb 10439
602ea4d3 10440 if (!TARGET_IEEEQUAD)
98c41d98 10441 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
10442 if (!TARGET_XL_COMPAT)
10443 {
10444 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10445 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10446 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10447 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 10448
17caeff2 10449 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10450 {
10451 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10452 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10453 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10454 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10455 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10456 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10457 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10458
10459 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10460 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10461 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10462 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10463 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10464 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10465 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10466 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10467 }
b26941b4
JM
10468
10469 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10470 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10471 }
10472 else
10473 {
10474 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10475 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10476 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10477 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10478 }
c9034561 10479 else
c15c90bb 10480 {
c9034561 10481 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10482
10483 set_optab_libfunc (add_optab, TFmode, "_q_add");
10484 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10485 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10486 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10487 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10488 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10489 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10490
c9034561
ZW
10491 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10492 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10493 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10494 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10495 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10496 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10497
85363ca0
ZW
10498 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10499 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10500 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10501 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10502 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10503 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10504 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10505 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10506 }
10507}
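/* Worked consequence of the code above (illustrative): when !TARGET_IEEEQUAD
   and !TARGET_XL_COMPAT, a TFmode addition expands to a call to __gcc_qadd;
   with TARGET_XL_COMPAT it calls the XL-compatible _xlqadd instead, and on
   32-bit SVR4 the same operation uses _q_add.  */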
fba73eb1
DE
10508
10509\f
10510/* Expand a block clear operation, and return 1 if successful. Return 0
10511 if we should let the compiler generate normal code.
10512
10513 operands[0] is the destination
10514 operands[1] is the length
57e84f18 10515 operands[3] is the alignment */
fba73eb1
DE
10516
10517int
10518expand_block_clear (rtx operands[])
10519{
10520 rtx orig_dest = operands[0];
10521 rtx bytes_rtx = operands[1];
57e84f18 10522 rtx align_rtx = operands[3];
5514620a
GK
10523 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10524 HOST_WIDE_INT align;
10525 HOST_WIDE_INT bytes;
fba73eb1
DE
10526 int offset;
10527 int clear_bytes;
5514620a 10528 int clear_step;
fba73eb1
DE
10529
10530 /* If this is not a fixed size clear, just call memset.  */
10531 if (! constp)
10532 return 0;
10533
37409796
NS
10534 /* This must be a fixed size alignment */
10535 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10536 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10537
10538 /* Anything to clear? */
10539 bytes = INTVAL (bytes_rtx);
10540 if (bytes <= 0)
10541 return 1;
10542
5514620a
GK
10543 /* Use the builtin memset after a point, to avoid huge code bloat.
10544 When optimize_size, avoid any significant code bloat; calling
10545 memset is about 4 instructions, so allow for one instruction to
10546 load zero and three to do clearing. */
10547 if (TARGET_ALTIVEC && align >= 128)
10548 clear_step = 16;
10549 else if (TARGET_POWERPC64 && align >= 32)
10550 clear_step = 8;
21d818ff
NF
10551 else if (TARGET_SPE && align >= 64)
10552 clear_step = 8;
5514620a
GK
10553 else
10554 clear_step = 4;
fba73eb1 10555
5514620a
GK
10556 if (optimize_size && bytes > 3 * clear_step)
10557 return 0;
10558 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10559 return 0;
10560
10561 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10562 {
fba73eb1
DE
10563 enum machine_mode mode = BLKmode;
10564 rtx dest;
f676971a 10565
5514620a
GK
10566 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10567 {
10568 clear_bytes = 16;
10569 mode = V4SImode;
10570 }
21d818ff
NF
10571 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10572 {
10573 clear_bytes = 8;
10574 mode = V2SImode;
10575 }
5514620a 10576 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10577 /* 64-bit loads and stores require word-aligned
10578 displacements. */
10579 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10580 {
10581 clear_bytes = 8;
10582 mode = DImode;
fba73eb1 10583 }
5514620a 10584 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
10585 { /* move 4 bytes */
10586 clear_bytes = 4;
10587 mode = SImode;
fba73eb1 10588 }
ec53fc93 10589 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
10590 { /* move 2 bytes */
10591 clear_bytes = 2;
10592 mode = HImode;
fba73eb1
DE
10593 }
10594 else /* move 1 byte at a time */
10595 {
10596 clear_bytes = 1;
10597 mode = QImode;
fba73eb1 10598 }
f676971a 10599
fba73eb1 10600 dest = adjust_address (orig_dest, mode, offset);
f676971a 10601
5514620a 10602 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10603 }
10604
10605 return 1;
10606}
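/* Worked example for expand_block_clear (illustrative): clearing 20 bytes
   with 128-bit alignment on an AltiVec target gives clear_step = 16, so the
   size limits are 48 bytes with optimize_size and 128 bytes otherwise; the
   loop then emits one V4SImode store of zero for the first 16 bytes and one
   SImode store for the remaining 4.  */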
10607
35aff10b 10608\f
7e69e155
MM
10609/* Expand a block move operation, and return 1 if successful. Return 0
10610 if we should let the compiler generate normal code.
10611
10612 operands[0] is the destination
10613 operands[1] is the source
10614 operands[2] is the length
10615 operands[3] is the alignment */
10616
3933e0e1
MM
10617#define MAX_MOVE_REG 4
10618
7e69e155 10619int
a2369ed3 10620expand_block_move (rtx operands[])
7e69e155 10621{
b6c9286a
MM
10622 rtx orig_dest = operands[0];
10623 rtx orig_src = operands[1];
7e69e155 10624 rtx bytes_rtx = operands[2];
7e69e155 10625 rtx align_rtx = operands[3];
3933e0e1 10626 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10627 int align;
3933e0e1
MM
10628 int bytes;
10629 int offset;
7e69e155 10630 int move_bytes;
cabfd258
GK
10631 rtx stores[MAX_MOVE_REG];
10632 int num_reg = 0;
7e69e155 10633
3933e0e1 10634 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 10635 if (! constp)
3933e0e1
MM
10636 return 0;
10637
37409796
NS
10638 /* This must be a fixed size alignment */
10639 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10640 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10641
7e69e155 10642 /* Anything to move? */
3933e0e1
MM
10643 bytes = INTVAL (bytes_rtx);
10644 if (bytes <= 0)
7e69e155
MM
10645 return 1;
10646
ea9982a8 10647 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10648 reg_parm_stack_space. */
ea9982a8 10649 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10650 return 0;
10651
cabfd258 10652 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10653 {
cabfd258 10654 union {
70128ad9 10655 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10656 rtx (*mov) (rtx, rtx);
cabfd258
GK
10657 } gen_func;
10658 enum machine_mode mode = BLKmode;
10659 rtx src, dest;
f676971a 10660
5514620a
GK
10661 /* Altivec first, since it will be faster than a string move
10662 when it applies, and usually not significantly larger. */
10663 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10664 {
10665 move_bytes = 16;
10666 mode = V4SImode;
10667 gen_func.mov = gen_movv4si;
10668 }
21d818ff
NF
10669 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10670 {
10671 move_bytes = 8;
10672 mode = V2SImode;
10673 gen_func.mov = gen_movv2si;
10674 }
5514620a 10675 else if (TARGET_STRING
cabfd258
GK
10676 && bytes > 24 /* move up to 32 bytes at a time */
10677 && ! fixed_regs[5]
10678 && ! fixed_regs[6]
10679 && ! fixed_regs[7]
10680 && ! fixed_regs[8]
10681 && ! fixed_regs[9]
10682 && ! fixed_regs[10]
10683 && ! fixed_regs[11]
10684 && ! fixed_regs[12])
7e69e155 10685 {
cabfd258 10686 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 10687 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
10688 }
10689 else if (TARGET_STRING
10690 && bytes > 16 /* move up to 24 bytes at a time */
10691 && ! fixed_regs[5]
10692 && ! fixed_regs[6]
10693 && ! fixed_regs[7]
10694 && ! fixed_regs[8]
10695 && ! fixed_regs[9]
10696 && ! fixed_regs[10])
10697 {
10698 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 10699 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
10700 }
10701 else if (TARGET_STRING
10702 && bytes > 8 /* move up to 16 bytes at a time */
10703 && ! fixed_regs[5]
10704 && ! fixed_regs[6]
10705 && ! fixed_regs[7]
10706 && ! fixed_regs[8])
10707 {
10708 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 10709 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
10710 }
10711 else if (bytes >= 8 && TARGET_POWERPC64
10712 /* 64-bit loads and stores require word-aligned
10713 displacements. */
fba73eb1 10714 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
10715 {
10716 move_bytes = 8;
10717 mode = DImode;
10718 gen_func.mov = gen_movdi;
10719 }
10720 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
10721 { /* move up to 8 bytes at a time */
10722 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 10723 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 10724 }
cd7d9ca4 10725 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
10726 { /* move 4 bytes */
10727 move_bytes = 4;
10728 mode = SImode;
10729 gen_func.mov = gen_movsi;
10730 }
ec53fc93 10731 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
10732 { /* move 2 bytes */
10733 move_bytes = 2;
10734 mode = HImode;
10735 gen_func.mov = gen_movhi;
10736 }
10737 else if (TARGET_STRING && bytes > 1)
10738 { /* move up to 4 bytes at a time */
10739 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 10740 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
10741 }
10742 else /* move 1 byte at a time */
10743 {
10744 move_bytes = 1;
10745 mode = QImode;
10746 gen_func.mov = gen_movqi;
10747 }
f676971a 10748
cabfd258
GK
10749 src = adjust_address (orig_src, mode, offset);
10750 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
10751
10752 if (mode != BLKmode)
cabfd258
GK
10753 {
10754 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 10755
cabfd258
GK
10756 emit_insn ((*gen_func.mov) (tmp_reg, src));
10757 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 10758 }
3933e0e1 10759
cabfd258
GK
10760 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
10761 {
10762 int i;
10763 for (i = 0; i < num_reg; i++)
10764 emit_insn (stores[i]);
10765 num_reg = 0;
10766 }
35aff10b 10767
cabfd258 10768 if (mode == BLKmode)
7e69e155 10769 {
70128ad9 10770 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
10771 patterns require zero offset. */
10772 if (!REG_P (XEXP (src, 0)))
b6c9286a 10773 {
cabfd258
GK
10774 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
10775 src = replace_equiv_address (src, src_reg);
b6c9286a 10776 }
cabfd258 10777 set_mem_size (src, GEN_INT (move_bytes));
f676971a 10778
cabfd258 10779 if (!REG_P (XEXP (dest, 0)))
3933e0e1 10780 {
cabfd258
GK
10781 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
10782 dest = replace_equiv_address (dest, dest_reg);
7e69e155 10783 }
cabfd258 10784 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 10785
70128ad9 10786 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
10787 GEN_INT (move_bytes & 31),
10788 align_rtx));
7e69e155 10789 }
7e69e155
MM
10790 }
10791
10792 return 1;
10793}
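/* Worked example for expand_block_move (illustrative): copying 32 bytes with
   only word alignment on a 32-bit TARGET_STRING machine, with r5-r12 not
   fixed, selects gen_movmemsi_8reg and moves everything in one string
   load/store sequence; without string instructions the same copy becomes
   eight SImode register moves, buffered through stores[] and flushed
   MAX_MOVE_REG insns at a time.  */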
10794
d62294f5 10795\f
9caa3eb2
DE
10796/* Return a string to perform a load_multiple operation.
10797 operands[0] is the vector.
10798 operands[1] is the source address.
10799 operands[2] is the first destination register. */
10800
10801const char *
a2369ed3 10802rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
10803{
10804 /* We have to handle the case where the pseudo used to contain the address
10805 is assigned to one of the output registers. */
10806 int i, j;
10807 int words = XVECLEN (operands[0], 0);
10808 rtx xop[10];
10809
10810 if (XVECLEN (operands[0], 0) == 1)
10811 return "{l|lwz} %2,0(%1)";
10812
10813 for (i = 0; i < words; i++)
10814 if (refers_to_regno_p (REGNO (operands[2]) + i,
10815 REGNO (operands[2]) + i + 1, operands[1], 0))
10816 {
10817 if (i == words-1)
10818 {
10819 xop[0] = GEN_INT (4 * (words-1));
10820 xop[1] = operands[1];
10821 xop[2] = operands[2];
10822 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
10823 return "";
10824 }
10825 else if (i == 0)
10826 {
10827 xop[0] = GEN_INT (4 * (words-1));
10828 xop[1] = operands[1];
10829 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
10830 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
10831 return "";
10832 }
10833 else
10834 {
10835 for (j = 0; j < words; j++)
10836 if (j != i)
10837 {
10838 xop[0] = GEN_INT (j * 4);
10839 xop[1] = operands[1];
10840 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
10841 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
10842 }
10843 xop[0] = GEN_INT (i * 4);
10844 xop[1] = operands[1];
10845 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
10846 return "";
10847 }
10848 }
10849
10850 return "{lsi|lswi} %2,%1,%N0";
10851}
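/* Worked example (illustrative): for a four-word load_multiple whose address
   register is also the last destination register, the code above emits
   "lswi dest,addr,12" for the first three words followed by
   "lwz addr,12(addr)", so the address register is only clobbered by the
   final load.  */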
10852
9878760c 10853\f
a4f6c312
SS
10854/* A validation routine: say whether CODE, a condition code, and MODE
10855 match. The other alternatives either don't make sense or should
10856 never be generated. */
39a10a29 10857
48d72335 10858void
a2369ed3 10859validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 10860{
37409796
NS
10861 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
10862 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
10863 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
10864
10865 /* These don't make sense. */
37409796
NS
10866 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
10867 || mode != CCUNSmode);
39a10a29 10868
37409796
NS
10869 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
10870 || mode == CCUNSmode);
39a10a29 10871
37409796
NS
10872 gcc_assert (mode == CCFPmode
10873 || (code != ORDERED && code != UNORDERED
10874 && code != UNEQ && code != LTGT
10875 && code != UNGT && code != UNLT
10876 && code != UNGE && code != UNLE));
f676971a
EC
10877
10878 /* These should never be generated except for
bc9ec0e0 10879 flag_finite_math_only. */
37409796
NS
10880 gcc_assert (mode != CCFPmode
10881 || flag_finite_math_only
10882 || (code != LE && code != GE
10883 && code != UNEQ && code != LTGT
10884 && code != UNGT && code != UNLT));
39a10a29
GK
10885
10886 /* These are invalid; the information is not there. */
37409796 10887 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
10888}
10889
9878760c
RK
10890\f
10891/* Return 1 if ANDOP is a mask with no bits set outside the mask required
10892 to convert the result of a rotate insn into a shift left insn of
10893 SHIFTOP bits.  Both are known to be SImode CONST_INT. */
9878760c
RK
10894
10895int
a2369ed3 10896includes_lshift_p (rtx shiftop, rtx andop)
9878760c 10897{
e2c953b6
DE
10898 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
10899
10900 shift_mask <<= INTVAL (shiftop);
9878760c 10901
b1765bde 10902 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
10903}
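/* Worked example (illustrative): for SHIFTOP == 8 the test above just
   requires that ANDOP have no 1 bits in its low eight positions; 0xffffff00
   passes and 0x0000ff01 fails.  In the passing case, rotating left by 8 and
   masking equals shifting left by 8 and masking, because the bits that wrap
   into the low byte are discarded by the mask.  */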
10904
10905/* Similar, but for right shift. */
10906
10907int
a2369ed3 10908includes_rshift_p (rtx shiftop, rtx andop)
9878760c 10909{
a7653a2c 10910 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
10911
10912 shift_mask >>= INTVAL (shiftop);
10913
b1765bde 10914 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
10915}
10916
c5059423
AM
10917/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
10918 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 10919 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
10920
10921int
a2369ed3 10922includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 10923{
c5059423
AM
10924 if (GET_CODE (andop) == CONST_INT)
10925 {
02071907 10926 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 10927
c5059423 10928 c = INTVAL (andop);
02071907 10929 if (c == 0 || c == ~0)
c5059423 10930 return 0;
e2c953b6 10931
02071907 10932 shift_mask = ~0;
c5059423
AM
10933 shift_mask <<= INTVAL (shiftop);
10934
b6d08ca1 10935 /* Find the least significant one bit. */
c5059423
AM
10936 lsb = c & -c;
10937
10938 /* It must coincide with the LSB of the shift mask. */
10939 if (-lsb != shift_mask)
10940 return 0;
e2c953b6 10941
c5059423
AM
10942 /* Invert to look for the next transition (if any). */
10943 c = ~c;
10944
10945 /* Remove the low group of ones (originally low group of zeros). */
10946 c &= -lsb;
10947
10948 /* Again find the lsb, and check we have all 1's above. */
10949 lsb = c & -c;
10950 return c == -lsb;
10951 }
10952 else if (GET_CODE (andop) == CONST_DOUBLE
10953 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10954 {
02071907
AM
10955 HOST_WIDE_INT low, high, lsb;
10956 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
10957
10958 low = CONST_DOUBLE_LOW (andop);
10959 if (HOST_BITS_PER_WIDE_INT < 64)
10960 high = CONST_DOUBLE_HIGH (andop);
10961
10962 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 10963 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
10964 return 0;
10965
10966 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10967 {
02071907 10968 shift_mask_high = ~0;
c5059423
AM
10969 if (INTVAL (shiftop) > 32)
10970 shift_mask_high <<= INTVAL (shiftop) - 32;
10971
10972 lsb = high & -high;
10973
10974 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
10975 return 0;
10976
10977 high = ~high;
10978 high &= -lsb;
10979
10980 lsb = high & -high;
10981 return high == -lsb;
10982 }
10983
02071907 10984 shift_mask_low = ~0;
c5059423
AM
10985 shift_mask_low <<= INTVAL (shiftop);
10986
10987 lsb = low & -low;
10988
10989 if (-lsb != shift_mask_low)
10990 return 0;
10991
10992 if (HOST_BITS_PER_WIDE_INT < 64)
10993 high = ~high;
10994 low = ~low;
10995 low &= -lsb;
10996
10997 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10998 {
10999 lsb = high & -high;
11000 return high == -lsb;
11001 }
11002
11003 lsb = low & -low;
11004 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
11005 }
11006 else
11007 return 0;
11008}
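/* Worked example (illustrative, assuming a 64-bit HOST_WIDE_INT): for
   ANDOP == 0xff00 and SHIFTOP == 8, lsb is 0x100 and -lsb equals the shift
   mask, so the low transition is in the right place; complementing and
   clearing that low run of 1s leaves 0xffffffffffff0000, which again
   satisfies c == -lsb, so the function returns 1: the mask has exactly
   eight low 0s, one block of 1s, and only 0s above.  */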
e2c953b6 11009
c5059423
AM
11010/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
11011 to perform a left shift. It must have SHIFTOP or more least
c1207243 11012 significant 0's, with the remainder of the word 1's. */
e2c953b6 11013
c5059423 11014int
a2369ed3 11015includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 11016{
e2c953b6 11017 if (GET_CODE (andop) == CONST_INT)
c5059423 11018 {
02071907 11019 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 11020
02071907 11021 shift_mask = ~0;
c5059423
AM
11022 shift_mask <<= INTVAL (shiftop);
11023 c = INTVAL (andop);
11024
c1207243 11025 /* Find the least significant one bit. */
c5059423
AM
11026 lsb = c & -c;
11027
11028 /* It must be covered by the shift mask.
a4f6c312 11029 This test also rejects c == 0. */
c5059423
AM
11030 if ((lsb & shift_mask) == 0)
11031 return 0;
11032
11033 /* Check we have all 1's above the transition, and reject all 1's. */
11034 return c == -lsb && lsb != 1;
11035 }
11036 else if (GET_CODE (andop) == CONST_DOUBLE
11037 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11038 {
02071907 11039 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
11040
11041 low = CONST_DOUBLE_LOW (andop);
11042
11043 if (HOST_BITS_PER_WIDE_INT < 64)
11044 {
02071907 11045 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
11046
11047 high = CONST_DOUBLE_HIGH (andop);
11048
11049 if (low == 0)
11050 {
02071907 11051 shift_mask_high = ~0;
c5059423
AM
11052 if (INTVAL (shiftop) > 32)
11053 shift_mask_high <<= INTVAL (shiftop) - 32;
11054
11055 lsb = high & -high;
11056
11057 if ((lsb & shift_mask_high) == 0)
11058 return 0;
11059
11060 return high == -lsb;
11061 }
11062 if (high != ~0)
11063 return 0;
11064 }
11065
02071907 11066 shift_mask_low = ~0;
c5059423
AM
11067 shift_mask_low <<= INTVAL (shiftop);
11068
11069 lsb = low & -low;
11070
11071 if ((lsb & shift_mask_low) == 0)
11072 return 0;
11073
11074 return low == -lsb && lsb != 1;
11075 }
e2c953b6 11076 else
c5059423 11077 return 0;
9878760c 11078}
35068b43 11079
11ac38b2
DE
11080/* Return 1 if the operands will generate valid arguments to an rlwimi
11081instruction for an insert with right shift in 64-bit mode.  The mask may
11082not start on the first bit or stop on the last bit because the wrap-around
11083effects of the instruction do not correspond to the semantics of the RTL insn. */
11084
11085int
11086insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
11087{
429ec7dc
DE
11088 if (INTVAL (startop) > 32
11089 && INTVAL (startop) < 64
11090 && INTVAL (sizeop) > 1
11091 && INTVAL (sizeop) + INTVAL (startop) < 64
11092 && INTVAL (shiftop) > 0
11093 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
11094 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
11095 return 1;
11096
11097 return 0;
11098}
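/* Worked example (illustrative): sizeop == 8, startop == 40, shiftop == 16
   passes every test above (40 lies strictly between 32 and 64, 8 + 40 < 64,
   8 + 16 < 32, and 64 - 16 >= 8), so an 8-bit insert at bit 40 combined
   with a right shift of 16 can be done with a single rlwimi.  */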
11099
35068b43 11100/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 11101 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
11102
11103int
a2369ed3 11104registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
11105{
11106 /* We might have been passed a SUBREG. */
f676971a 11107 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 11108 return 0;
f676971a 11109
90f81f99
AP
11110 /* We might have been passed non-floating-point registers. */
11111 if (!FP_REGNO_P (REGNO (reg1))
11112 || !FP_REGNO_P (REGNO (reg2)))
11113 return 0;
35068b43
RK
11114
11115 return (REGNO (reg1) == REGNO (reg2) - 1);
11116}
11117
a4f6c312
SS
11118/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
11119 addr1 and addr2 must be in consecutive memory locations
11120 (addr2 == addr1 + 8). */
35068b43
RK
11121
11122int
90f81f99 11123mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 11124{
90f81f99 11125 rtx addr1, addr2;
bb8df8a6
EC
11126 unsigned int reg1, reg2;
11127 int offset1, offset2;
35068b43 11128
90f81f99
AP
11129 /* The mems cannot be volatile. */
11130 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
11131 return 0;
f676971a 11132
90f81f99
AP
11133 addr1 = XEXP (mem1, 0);
11134 addr2 = XEXP (mem2, 0);
11135
35068b43
RK
11136 /* Extract an offset (if used) from the first addr. */
11137 if (GET_CODE (addr1) == PLUS)
11138 {
11139 /* If not a REG, return zero. */
11140 if (GET_CODE (XEXP (addr1, 0)) != REG)
11141 return 0;
11142 else
11143 {
c4ad648e 11144 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
11145 /* The offset must be constant! */
11146 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
11147 return 0;
11148 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
11149 }
11150 }
11151 else if (GET_CODE (addr1) != REG)
11152 return 0;
11153 else
11154 {
11155 reg1 = REGNO (addr1);
11156 /* This was a simple (mem (reg)) expression. Offset is 0. */
11157 offset1 = 0;
11158 }
11159
bb8df8a6
EC
11160 /* And now for the second addr. */
11161 if (GET_CODE (addr2) == PLUS)
11162 {
11163 /* If not a REG, return zero. */
11164 if (GET_CODE (XEXP (addr2, 0)) != REG)
11165 return 0;
11166 else
11167 {
11168 reg2 = REGNO (XEXP (addr2, 0));
11169 /* The offset must be constant. */
11170 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11171 return 0;
11172 offset2 = INTVAL (XEXP (addr2, 1));
11173 }
11174 }
11175 else if (GET_CODE (addr2) != REG)
35068b43 11176 return 0;
bb8df8a6
EC
11177 else
11178 {
11179 reg2 = REGNO (addr2);
11180 /* This was a simple (mem (reg)) expression. Offset is 0. */
11181 offset2 = 0;
11182 }
35068b43 11183
bb8df8a6
EC
11184 /* Both of these must have the same base register. */
11185 if (reg1 != reg2)
35068b43
RK
11186 return 0;
11187
11188 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 11189 if (offset2 != offset1 + 8)
35068b43
RK
11190 return 0;
11191
11192 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11193 instructions. */
11194 return 1;
11195}
9878760c 11196\f
e41b2a33
PB
11197
11198rtx
11199rs6000_secondary_memory_needed_rtx (enum machine_mode mode)
11200{
11201 static bool eliminated = false;
11202 if (mode != SDmode)
11203 return assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
11204 else
11205 {
11206 rtx mem = cfun->machine->sdmode_stack_slot;
11207 gcc_assert (mem != NULL_RTX);
11208
11209 if (!eliminated)
11210 {
11211 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
11212 cfun->machine->sdmode_stack_slot = mem;
11213 eliminated = true;
11214 }
11215 return mem;
11216 }
11217}
11218
11219static tree
11220rs6000_check_sdmode (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11221{
11222 /* Don't walk into types. */
11223 if (*tp == NULL_TREE || *tp == error_mark_node || TYPE_P (*tp))
11224 {
11225 *walk_subtrees = 0;
11226 return NULL_TREE;
11227 }
11228
11229 switch (TREE_CODE (*tp))
11230 {
11231 case VAR_DECL:
11232 case PARM_DECL:
11233 case FIELD_DECL:
11234 case RESULT_DECL:
11235 case REAL_CST:
fdf4f148 11236 case INDIRECT_REF:
a0f39282
JJ
11237 case ALIGN_INDIRECT_REF:
11238 case MISALIGNED_INDIRECT_REF:
fdf4f148 11239 case VIEW_CONVERT_EXPR:
e41b2a33
PB
11240 if (TYPE_MODE (TREE_TYPE (*tp)) == SDmode)
11241 return *tp;
11242 break;
11243 default:
11244 break;
11245 }
11246
11247 return NULL_TREE;
11248}
11249
11250
11251/* Allocate a 64-bit stack slot to be used for copying SDmode
11252 values through if this function has any SDmode references. */
11253
11254static void
11255rs6000_alloc_sdmode_stack_slot (void)
11256{
11257 tree t;
11258 basic_block bb;
11259 block_stmt_iterator bsi;
11260
11261 gcc_assert (cfun->machine->sdmode_stack_slot == NULL_RTX);
11262
11263 FOR_EACH_BB (bb)
11264 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
11265 {
11266 tree ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
11267 rs6000_check_sdmode, NULL);
11268 if (ret)
11269 {
11270 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11271 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11272 SDmode, 0);
11273 return;
11274 }
11275 }
11276
11277 /* Check for any SDmode parameters of the function. */
11278 for (t = DECL_ARGUMENTS (cfun->decl); t; t = TREE_CHAIN (t))
11279 {
11280 if (TREE_TYPE (t) == error_mark_node)
11281 continue;
11282
11283 if (TYPE_MODE (TREE_TYPE (t)) == SDmode
11284 || TYPE_MODE (DECL_ARG_TYPE (t)) == SDmode)
11285 {
11286 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11287 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11288 SDmode, 0);
11289 return;
11290 }
11291 }
11292}
11293
11294static void
11295rs6000_instantiate_decls (void)
11296{
11297 if (cfun->machine->sdmode_stack_slot != NULL_RTX)
11298 instantiate_decl_rtl (cfun->machine->sdmode_stack_slot);
11299}
11300
9878760c
RK
11301/* Return the register class of a scratch register needed to copy IN into
11302 or out of a register in CLASS in MODE. If it can be done directly,
11303 NO_REGS is returned. */
11304
11305enum reg_class
3c4774e0
R
11306rs6000_secondary_reload_class (enum reg_class class,
11307 enum machine_mode mode ATTRIBUTE_UNUSED,
11308 rtx in)
9878760c 11309{
5accd822 11310 int regno;
9878760c 11311
ab82a49f
AP
11312 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
11313#if TARGET_MACHO
c4ad648e 11314 && MACHOPIC_INDIRECT
ab82a49f 11315#endif
c4ad648e 11316 ))
46fad5b7
DJ
11317 {
11318 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
11319 other than BASE_REGS for TARGET_ELF. So indicate that a
11320 register from BASE_REGS is needed as an intermediate
11321 register.
f676971a 11322
46fad5b7
DJ
11323 On Darwin, pic addresses require a load from memory, which
11324 needs a base register. */
11325 if (class != BASE_REGS
c4ad648e
AM
11326 && (GET_CODE (in) == SYMBOL_REF
11327 || GET_CODE (in) == HIGH
11328 || GET_CODE (in) == LABEL_REF
11329 || GET_CODE (in) == CONST))
11330 return BASE_REGS;
46fad5b7 11331 }
e7b7998a 11332
5accd822
DE
11333 if (GET_CODE (in) == REG)
11334 {
11335 regno = REGNO (in);
11336 if (regno >= FIRST_PSEUDO_REGISTER)
11337 {
11338 regno = true_regnum (in);
11339 if (regno >= FIRST_PSEUDO_REGISTER)
11340 regno = -1;
11341 }
11342 }
11343 else if (GET_CODE (in) == SUBREG)
11344 {
11345 regno = true_regnum (in);
11346 if (regno >= FIRST_PSEUDO_REGISTER)
11347 regno = -1;
11348 }
11349 else
11350 regno = -1;
11351
9878760c
RK
11352 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
11353 into anything. */
11354 if (class == GENERAL_REGS || class == BASE_REGS
11355 || (regno >= 0 && INT_REGNO_P (regno)))
11356 return NO_REGS;
11357
11358 /* Constants, memory, and FP registers can go into FP registers. */
11359 if ((regno == -1 || FP_REGNO_P (regno))
11360 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
e41b2a33 11361 return (mode != SDmode) ? NO_REGS : GENERAL_REGS;
9878760c 11362
0ac081f6
AH
11363 /* Memory, and AltiVec registers can go into AltiVec registers. */
11364 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
11365 && class == ALTIVEC_REGS)
11366 return NO_REGS;
11367
9878760c
RK
11368 /* We can copy among the CR registers. */
11369 if ((class == CR_REGS || class == CR0_REGS)
11370 && regno >= 0 && CR_REGNO_P (regno))
11371 return NO_REGS;
11372
11373 /* Otherwise, we need GENERAL_REGS. */
11374 return GENERAL_REGS;
11375}
11376\f
11377/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 11378 know this is a valid comparison.
9878760c
RK
11379
11380 SCC_P is 1 if this is for an scc. That means that %D will have been
11381 used instead of %C, so the bits will be in different places.
11382
b4ac57ab 11383 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
11384
11385int
a2369ed3 11386ccr_bit (rtx op, int scc_p)
9878760c
RK
11387{
11388 enum rtx_code code = GET_CODE (op);
11389 enum machine_mode cc_mode;
11390 int cc_regnum;
11391 int base_bit;
9ebbca7d 11392 rtx reg;
9878760c 11393
ec8e098d 11394 if (!COMPARISON_P (op))
9878760c
RK
11395 return -1;
11396
9ebbca7d
GK
11397 reg = XEXP (op, 0);
11398
37409796 11399 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
11400
11401 cc_mode = GET_MODE (reg);
11402 cc_regnum = REGNO (reg);
11403 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 11404
39a10a29 11405 validate_condition_mode (code, cc_mode);
c5defebb 11406
b7053a3f
GK
11407 /* When generating a sCOND operation, only positive conditions are
11408 allowed. */
37409796
NS
11409 gcc_assert (!scc_p
11410 || code == EQ || code == GT || code == LT || code == UNORDERED
11411 || code == GTU || code == LTU);
f676971a 11412
9878760c
RK
11413 switch (code)
11414 {
11415 case NE:
11416 return scc_p ? base_bit + 3 : base_bit + 2;
11417 case EQ:
11418 return base_bit + 2;
1c882ea4 11419 case GT: case GTU: case UNLE:
9878760c 11420 return base_bit + 1;
1c882ea4 11421 case LT: case LTU: case UNGE:
9878760c 11422 return base_bit;
1c882ea4
GK
11423 case ORDERED: case UNORDERED:
11424 return base_bit + 3;
9878760c
RK
11425
11426 case GE: case GEU:
39a10a29 11427 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
11428 unordered position. So test that bit. For integer, this is ! LT
11429 unless this is an scc insn. */
39a10a29 11430 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
11431
11432 case LE: case LEU:
39a10a29 11433 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 11434
9878760c 11435 default:
37409796 11436 gcc_unreachable ();
9878760c
RK
11437 }
11438}
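/* Illustrative note: each CR field holds four bits in the order LT, GT, EQ,
   SO/UN, so base_bit is 4 * (CR field number); an EQ test on CR field 6,
   for example, maps to CCR bit 4 * 6 + 2 == 26, while NE uses base_bit + 2
   normally but base_bit + 3 for scc, where a cror has already copied the
   result into the SO/UN slot.  */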
1ff7789b 11439\f
8d30c4ee 11440/* Return the GOT register. */
1ff7789b 11441
9390387d 11442rtx
a2369ed3 11443rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 11444{
a4f6c312
SS
11445 /* The second flow pass currently (June 1999) can't update
11446 regs_ever_live without disturbing other parts of the compiler, so
11447 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
11448 if (!can_create_pseudo_p ()
11449 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 11450 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 11451
8d30c4ee 11452 current_function_uses_pic_offset_table = 1;
3cb999d8 11453
1ff7789b
MM
11454 return pic_offset_table_rtx;
11455}
a7df97e6 11456\f
e2500fed
GK
11457/* Function to init struct machine_function.
11458 This will be called, via a pointer variable,
11459 from push_function_context. */
a7df97e6 11460
e2500fed 11461static struct machine_function *
863d938c 11462rs6000_init_machine_status (void)
a7df97e6 11463{
e2500fed 11464 return ggc_alloc_cleared (sizeof (machine_function));
a7df97e6 11465}
9878760c 11466\f
0ba1b2ff
AM
11467/* These macros test for integers and extract the low-order bits. */
11468#define INT_P(X) \
11469((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11470 && GET_MODE (X) == VOIDmode)
11471
11472#define INT_LOWPART(X) \
11473 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11474
11475int
a2369ed3 11476extract_MB (rtx op)
0ba1b2ff
AM
11477{
11478 int i;
11479 unsigned long val = INT_LOWPART (op);
11480
11481 /* If the high bit is zero, the value is the first 1 bit we find
11482 from the left. */
11483 if ((val & 0x80000000) == 0)
11484 {
37409796 11485 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11486
11487 i = 1;
11488 while (((val <<= 1) & 0x80000000) == 0)
11489 ++i;
11490 return i;
11491 }
11492
11493 /* If the high bit is set and the low bit is not, or the mask is all
11494 1's, the value is zero. */
11495 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11496 return 0;
11497
11498 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11499 from the right. */
11500 i = 31;
11501 while (((val >>= 1) & 1) != 0)
11502 --i;
11503
11504 return i;
11505}
11506
11507int
a2369ed3 11508extract_ME (rtx op)
0ba1b2ff
AM
11509{
11510 int i;
11511 unsigned long val = INT_LOWPART (op);
11512
11513 /* If the low bit is zero, the value is the first 1 bit we find from
11514 the right. */
11515 if ((val & 1) == 0)
11516 {
37409796 11517 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11518
11519 i = 30;
11520 while (((val >>= 1) & 1) == 0)
11521 --i;
11522
11523 return i;
11524 }
11525
11526 /* If the low bit is set and the high bit is not, or the mask is all
11527 1's, the value is 31. */
11528 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11529 return 31;
11530
11531 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11532 from the left. */
11533 i = 0;
11534 while (((val <<= 1) & 0x80000000) != 0)
11535 ++i;
11536
11537 return i;
11538}
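/* Worked example (illustrative): the rlwinm mask 0x0ffffff0 has its first
   1 bit four positions from the most significant end and its last 1 bit at
   position 27, so extract_MB returns 4 and extract_ME returns 27; a
   wrap-around mask such as 0xff0000ff yields MB == 24 and ME == 7, matching
   the big-endian bit numbering the rotate-and-mask instructions use.  */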
11539
c4501e62
JJ
11540/* Locate some local-dynamic symbol still in use by this function
11541 so that we can print its name in some tls_ld pattern. */
11542
11543static const char *
863d938c 11544rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
11545{
11546 rtx insn;
11547
11548 if (cfun->machine->some_ld_name)
11549 return cfun->machine->some_ld_name;
11550
11551 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11552 if (INSN_P (insn)
11553 && for_each_rtx (&PATTERN (insn),
11554 rs6000_get_some_local_dynamic_name_1, 0))
11555 return cfun->machine->some_ld_name;
11556
37409796 11557 gcc_unreachable ();
c4501e62
JJ
11558}
11559
11560/* Helper function for rs6000_get_some_local_dynamic_name. */
11561
11562static int
a2369ed3 11563rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
11564{
11565 rtx x = *px;
11566
11567 if (GET_CODE (x) == SYMBOL_REF)
11568 {
11569 const char *str = XSTR (x, 0);
11570 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11571 {
11572 cfun->machine->some_ld_name = str;
11573 return 1;
11574 }
11575 }
11576
11577 return 0;
11578}
11579
85b776df
AM
11580/* Write out a function code label. */
11581
11582void
11583rs6000_output_function_entry (FILE *file, const char *fname)
11584{
11585 if (fname[0] != '.')
11586 {
11587 switch (DEFAULT_ABI)
11588 {
11589 default:
37409796 11590 gcc_unreachable ();
85b776df
AM
11591
11592 case ABI_AIX:
11593 if (DOT_SYMBOLS)
11594 putc ('.', file);
11595 else
11596 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11597 break;
11598
11599 case ABI_V4:
11600 case ABI_DARWIN:
11601 break;
11602 }
11603 }
11604 if (TARGET_AIX)
11605 RS6000_OUTPUT_BASENAME (file, fname);
11606 else
11607 assemble_name (file, fname);
11608}
11609
9878760c
RK
11610/* Print an operand. Recognize special options, documented below. */
11611
38c1f2d7 11612#if TARGET_ELF
d9407988 11613#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11614#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
11615#else
11616#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11617#define SMALL_DATA_REG 0
ba5e43aa
MM
11618#endif
11619
9878760c 11620void
a2369ed3 11621print_operand (FILE *file, rtx x, int code)
9878760c
RK
11622{
11623 int i;
a260abc9 11624 HOST_WIDE_INT val;
0ba1b2ff 11625 unsigned HOST_WIDE_INT uval;
9878760c
RK
11626
11627 switch (code)
11628 {
a8b3aeda 11629 case '.':
a85d226b
RK
11630 /* Write out an instruction after the call which may be replaced
11631 with glue code by the loader. This depends on the AIX version. */
11632 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
11633 return;
11634
81eace42
GK
11635 /* %a is output_address. */
11636
9854d9ed
RK
11637 case 'A':
11638 /* If X is a constant integer whose low-order 5 bits are zero,
11639 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11640 in the AIX assembler where "sri" with a zero shift count
20e26713 11641 writes a trash instruction. */
9854d9ed 11642 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11643 putc ('l', file);
9854d9ed 11644 else
76229ac8 11645 putc ('r', file);
9854d9ed
RK
11646 return;
11647
11648 case 'b':
e2c953b6
DE
11649 /* If constant, low-order 16 bits of constant, unsigned.
11650 Otherwise, write normally. */
11651 if (INT_P (x))
11652 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11653 else
11654 print_operand (file, x, 0);
cad12a8d
RK
11655 return;
11656
a260abc9
DE
11657 case 'B':
11658 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11659 for 64-bit mask direction. */
9390387d 11660 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11661 return;
a260abc9 11662
81eace42
GK
11663 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11664 output_operand. */
11665
423c1189
AH
11666 case 'c':
11667 /* X is a CR register. Print the number of the GT bit of the CR. */
11668 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11669 output_operand_lossage ("invalid %%E value");
11670 else
11671 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11672 return;
11673
11674 case 'D':
cef6b86c 11675 /* Like 'J' but get to the GT bit only. */
37409796 11676 gcc_assert (GET_CODE (x) == REG);
423c1189 11677
cef6b86c
EB
11678 /* Bit 1 is GT bit. */
11679 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 11680
cef6b86c
EB
11681 /* Add one for shift count in rlinm for scc. */
11682 fprintf (file, "%d", i + 1);
423c1189
AH
11683 return;
11684
9854d9ed 11685 case 'E':
39a10a29 11686 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
11687 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11688 output_operand_lossage ("invalid %%E value");
78fbdbf7 11689 else
39a10a29 11690 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 11691 return;
9854d9ed
RK
11692
11693 case 'f':
11694 /* X is a CR register. Print the shift count needed to move it
11695 to the high-order four bits. */
11696 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11697 output_operand_lossage ("invalid %%f value");
11698 else
9ebbca7d 11699 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11700 return;
11701
11702 case 'F':
11703 /* Similar, but print the count for the rotate in the opposite
11704 direction. */
11705 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11706 output_operand_lossage ("invalid %%F value");
11707 else
9ebbca7d 11708 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11709 return;
11710
11711 case 'G':
11712 /* X is a constant integer. If it is negative, print "m",
43aa4e05 11713 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
11714 if (GET_CODE (x) != CONST_INT)
11715 output_operand_lossage ("invalid %%G value");
11716 else if (INTVAL (x) >= 0)
76229ac8 11717 putc ('z', file);
9854d9ed 11718 else
76229ac8 11719 putc ('m', file);
9854d9ed 11720 return;
e2c953b6 11721
9878760c 11722 case 'h':
a4f6c312
SS
11723 /* If constant, output low-order five bits. Otherwise, write
11724 normally. */
9878760c 11725 if (INT_P (x))
5f59ecb7 11726 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
11727 else
11728 print_operand (file, x, 0);
11729 return;
11730
64305719 11731 case 'H':
a4f6c312
SS
11732 /* If constant, output low-order six bits. Otherwise, write
11733 normally. */
64305719 11734 if (INT_P (x))
5f59ecb7 11735 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
11736 else
11737 print_operand (file, x, 0);
11738 return;
11739
9854d9ed
RK
11740 case 'I':
11741 /* Print `i' if this is a constant, else nothing. */
9878760c 11742 if (INT_P (x))
76229ac8 11743 putc ('i', file);
9878760c
RK
11744 return;
11745
9854d9ed
RK
11746 case 'j':
11747 /* Write the bit number in CCR for jump. */
11748 i = ccr_bit (x, 0);
11749 if (i == -1)
11750 output_operand_lossage ("invalid %%j code");
9878760c 11751 else
9854d9ed 11752 fprintf (file, "%d", i);
9878760c
RK
11753 return;
11754
9854d9ed
RK
11755 case 'J':
11756 /* Similar, but add one for shift count in rlinm for scc and pass
11757 scc flag to `ccr_bit'. */
11758 i = ccr_bit (x, 1);
11759 if (i == -1)
11760 output_operand_lossage ("invalid %%J code");
11761 else
a0466a68
RK
11762 /* If we want bit 31, write a shift count of zero, not 32. */
11763 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
11764 return;
11765
9854d9ed
RK
11766 case 'k':
11767 /* X must be a constant. Write the 1's complement of the
11768 constant. */
9878760c 11769 if (! INT_P (x))
9854d9ed 11770 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
11771 else
11772 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
11773 return;
11774
81eace42 11775 case 'K':
9ebbca7d
GK
11776 /* X must be a symbolic constant on ELF. Write an
11777 expression suitable for an 'addi' that adds in the low 16
11778 bits of the MEM. */
11779 if (GET_CODE (x) != CONST)
11780 {
11781 print_operand_address (file, x);
11782 fputs ("@l", file);
11783 }
11784 else
11785 {
11786 if (GET_CODE (XEXP (x, 0)) != PLUS
11787 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
11788 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
11789 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 11790 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
11791 print_operand_address (file, XEXP (XEXP (x, 0), 0));
11792 fputs ("@l", file);
ed8d2920
MM
11793 /* For GNU as, there must be a non-alphanumeric character
11794 between 'l' and the number. The '-' is added by
11795 print_operand() already. */
11796 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
11797 fputs ("+", file);
9ebbca7d
GK
11798 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
11799 }
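 /* For instance, (const (plus (symbol_ref "sym") (const_int 4)))
 normally comes out as "sym@l+4". */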
81eace42
GK
11800 return;
11801
11802 /* %l is output_asm_label. */
9ebbca7d 11803
9854d9ed
RK
11804 case 'L':
11805 /* Write second word of DImode or DFmode reference. Works on register
11806 or non-indexed memory only. */
11807 if (GET_CODE (x) == REG)
fb5c67a7 11808 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
11809 else if (GET_CODE (x) == MEM)
11810 {
11811 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 11812 we have already done it, we can just use an offset of word. */
9854d9ed
RK
11813 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11814 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
11815 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11816 UNITS_PER_WORD));
6fb5fa3c
DB
11817 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11818 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11819 UNITS_PER_WORD));
9854d9ed 11820 else
d7624dc0
RK
11821 output_address (XEXP (adjust_address_nv (x, SImode,
11822 UNITS_PER_WORD),
11823 0));
ed8908e7 11824
ba5e43aa 11825 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11826 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11827 reg_names[SMALL_DATA_REG]);
9854d9ed 11828 }
9878760c 11829 return;
f676971a 11830
9878760c
RK
11831 case 'm':
11832 /* MB value for a mask operand. */
b1765bde 11833 if (! mask_operand (x, SImode))
9878760c
RK
11834 output_operand_lossage ("invalid %%m value");
11835
0ba1b2ff 11836 fprintf (file, "%d", extract_MB (x));
9878760c
RK
11837 return;
11838
11839 case 'M':
11840 /* ME value for a mask operand. */
b1765bde 11841 if (! mask_operand (x, SImode))
a260abc9 11842 output_operand_lossage ("invalid %%M value");
9878760c 11843
0ba1b2ff 11844 fprintf (file, "%d", extract_ME (x));
9878760c
RK
11845 return;
11846
81eace42
GK
11847 /* %n outputs the negative of its operand. */
11848
9878760c
RK
11849 case 'N':
11850 /* Write the number of elements in the vector times 4. */
11851 if (GET_CODE (x) != PARALLEL)
11852 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
11853 else
11854 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
11855 return;
11856
11857 case 'O':
11858 /* Similar, but subtract 1 first. */
11859 if (GET_CODE (x) != PARALLEL)
1427100a 11860 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
11861 else
11862 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
11863 return;
11864
9854d9ed
RK
11865 case 'p':
11866 /* X is a CONST_INT that is a power of two. Output the logarithm. */
11867 if (! INT_P (x)
2bfcf297 11868 || INT_LOWPART (x) < 0
9854d9ed
RK
11869 || (i = exact_log2 (INT_LOWPART (x))) < 0)
11870 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
11871 else
11872 fprintf (file, "%d", i);
9854d9ed
RK
11873 return;
11874
9878760c
RK
11875 case 'P':
11876 /* The operand must be an indirect memory reference. The result
8bb418a3 11877 is the register name. */
9878760c
RK
11878 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
11879 || REGNO (XEXP (x, 0)) >= 32)
11880 output_operand_lossage ("invalid %%P value");
e2c953b6 11881 else
fb5c67a7 11882 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
11883 return;
11884
dfbdccdb
GK
11885 case 'q':
11886 /* This outputs the logical code corresponding to a boolean
11887 expression. The expression may have one or both operands
39a10a29 11888 negated (if one, only the first one). For condition register
c4ad648e
AM
11889 logical operations, it will also treat the negated
11890 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 11891 {
63bc1d05 11892 const char *const *t = 0;
dfbdccdb
GK
11893 const char *s;
11894 enum rtx_code code = GET_CODE (x);
11895 static const char * const tbl[3][3] = {
11896 { "and", "andc", "nor" },
11897 { "or", "orc", "nand" },
11898 { "xor", "eqv", "xor" } };
11899
11900 if (code == AND)
11901 t = tbl[0];
11902 else if (code == IOR)
11903 t = tbl[1];
11904 else if (code == XOR)
11905 t = tbl[2];
11906 else
11907 output_operand_lossage ("invalid %%q value");
11908
11909 if (GET_CODE (XEXP (x, 0)) != NOT)
11910 s = t[0];
11911 else
11912 {
11913 if (GET_CODE (XEXP (x, 1)) == NOT)
11914 s = t[2];
11915 else
11916 s = t[1];
11917 }
f676971a 11918
dfbdccdb
GK
11919 fputs (s, file);
11920 }
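 /* E.g. (and (not a) b) prints "andc", and (ior (not a) (not b))
 prints "nand", per the table above. */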
11921 return;
11922
2c4a9cff
DE
11923 case 'Q':
11924 if (TARGET_MFCRF)
3b6ce0af 11925 fputc (',', file);
5efb1046 11926 /* FALLTHRU */
2c4a9cff
DE
11927 else
11928 return;
11929
9854d9ed
RK
11930 case 'R':
11931 /* X is a CR register. Print the mask for `mtcrf'. */
11932 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11933 output_operand_lossage ("invalid %%R value");
11934 else
9ebbca7d 11935 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 11936 return;
9854d9ed
RK
11937
11938 case 's':
11939 /* Low 5 bits of (32 - value). */
11940 if (! INT_P (x))
11941 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
11942 else
11943 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 11944 return;
9854d9ed 11945
a260abc9 11946 case 'S':
0ba1b2ff 11947 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
11948 CONST_INT 32-bit mask is considered sign-extended so any
11949 transition must occur within the CONST_INT, not on the boundary. */
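 /* For example, the "clear left" mask 0x000000000000ffff (low bit set)
 prints 48 and the "clear right" mask 0xffffffffffff0000 prints 47,
 i.e. the mask-begin/mask-end operand used with rldicl/rldicr. */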
1990cd79 11950 if (! mask64_operand (x, DImode))
a260abc9
DE
11951 output_operand_lossage ("invalid %%S value");
11952
0ba1b2ff 11953 uval = INT_LOWPART (x);
a260abc9 11954
0ba1b2ff 11955 if (uval & 1) /* Clear Left */
a260abc9 11956 {
f099d360
GK
11957#if HOST_BITS_PER_WIDE_INT > 64
11958 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11959#endif
0ba1b2ff 11960 i = 64;
a260abc9 11961 }
0ba1b2ff 11962 else /* Clear Right */
a260abc9 11963 {
0ba1b2ff 11964 uval = ~uval;
f099d360
GK
11965#if HOST_BITS_PER_WIDE_INT > 64
11966 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11967#endif
0ba1b2ff 11968 i = 63;
a260abc9 11969 }
0ba1b2ff
AM
11970 while (uval != 0)
11971 --i, uval >>= 1;
37409796 11972 gcc_assert (i >= 0);
0ba1b2ff
AM
11973 fprintf (file, "%d", i);
11974 return;
a260abc9 11975
a3170dc6
AH
11976 case 't':
11977 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 11978 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
11979
11980 /* Bit 3 is OV bit. */
11981 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
11982
11983 /* If we want bit 31, write a shift count of zero, not 32. */
11984 fprintf (file, "%d", i == 31 ? 0 : i + 1);
11985 return;
11986
cccf3bdc
DE
11987 case 'T':
11988 /* Print the symbolic name of a branch target register. */
1de43f85
DE
11989 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
11990 && REGNO (x) != CTR_REGNO))
cccf3bdc 11991 output_operand_lossage ("invalid %%T value");
1de43f85 11992 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
11993 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
11994 else
11995 fputs ("ctr", file);
11996 return;
11997
9854d9ed 11998 case 'u':
802a0058 11999 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
12000 if (! INT_P (x))
12001 output_operand_lossage ("invalid %%u value");
e2c953b6 12002 else
f676971a 12003 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 12004 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
12005 return;
12006
802a0058
MM
12007 case 'v':
12008 /* High-order 16 bits of constant for use in signed operand. */
12009 if (! INT_P (x))
12010 output_operand_lossage ("invalid %%v value");
e2c953b6 12011 else
134c32f6
DE
12012 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
12013 (INT_LOWPART (x) >> 16) & 0xffff);
12014 return;
802a0058 12015
9854d9ed
RK
12016 case 'U':
12017 /* Print `u' if this has an auto-increment or auto-decrement. */
12018 if (GET_CODE (x) == MEM
12019 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
12020 || GET_CODE (XEXP (x, 0)) == PRE_DEC
12021 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 12022 putc ('u', file);
9854d9ed 12023 return;
9878760c 12024
e0cd0770
JC
12025 case 'V':
12026 /* Print the trap code for this operand. */
12027 switch (GET_CODE (x))
12028 {
12029 case EQ:
12030 fputs ("eq", file); /* 4 */
12031 break;
12032 case NE:
12033 fputs ("ne", file); /* 24 */
12034 break;
12035 case LT:
12036 fputs ("lt", file); /* 16 */
12037 break;
12038 case LE:
12039 fputs ("le", file); /* 20 */
12040 break;
12041 case GT:
12042 fputs ("gt", file); /* 8 */
12043 break;
12044 case GE:
12045 fputs ("ge", file); /* 12 */
12046 break;
12047 case LTU:
12048 fputs ("llt", file); /* 2 */
12049 break;
12050 case LEU:
12051 fputs ("lle", file); /* 6 */
12052 break;
12053 case GTU:
12054 fputs ("lgt", file); /* 1 */
12055 break;
12056 case GEU:
12057 fputs ("lge", file); /* 5 */
12058 break;
12059 default:
37409796 12060 gcc_unreachable ();
e0cd0770
JC
12061 }
12062 break;
12063
9854d9ed
RK
12064 case 'w':
12065 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
12066 normally. */
12067 if (INT_P (x))
f676971a 12068 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 12069 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
12070 else
12071 print_operand (file, x, 0);
9878760c
RK
12072 return;
12073
9854d9ed 12074 case 'W':
e2c953b6 12075 /* MB value for a PowerPC64 rldic operand. */
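 /* This is the number of leading zero bits in the constant:
 e.g. 0x000000ffffffff00 prints 24. */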
e2c953b6
DE
12076 val = (GET_CODE (x) == CONST_INT
12077 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
12078
12079 if (val < 0)
12080 i = -1;
9854d9ed 12081 else
e2c953b6
DE
12082 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
12083 if ((val <<= 1) < 0)
12084 break;
12085
12086#if HOST_BITS_PER_WIDE_INT == 32
12087 if (GET_CODE (x) == CONST_INT && i >= 0)
12088 i += 32; /* zero-extend high-part was all 0's */
12089 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
12090 {
12091 val = CONST_DOUBLE_LOW (x);
12092
37409796
NS
12093 gcc_assert (val);
12094 if (val < 0)
e2c953b6
DE
12095 --i;
12096 else
12097 for ( ; i < 64; i++)
12098 if ((val <<= 1) < 0)
12099 break;
12100 }
12101#endif
12102
12103 fprintf (file, "%d", i + 1);
9854d9ed 12104 return;
9878760c 12105
9854d9ed
RK
12106 case 'X':
12107 if (GET_CODE (x) == MEM
6fb5fa3c
DB
12108 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
12109 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
12110 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 12111 putc ('x', file);
9854d9ed 12112 return;
9878760c 12113
9854d9ed
RK
12114 case 'Y':
12115 /* Like 'L', for third word of TImode */
12116 if (GET_CODE (x) == REG)
fb5c67a7 12117 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 12118 else if (GET_CODE (x) == MEM)
9878760c 12119 {
9854d9ed
RK
12120 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12121 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12122 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
12123 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12124 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 12125 else
d7624dc0 12126 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 12127 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12128 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12129 reg_names[SMALL_DATA_REG]);
9878760c
RK
12130 }
12131 return;
f676971a 12132
9878760c 12133 case 'z':
b4ac57ab
RS
12134 /* X is a SYMBOL_REF. Write out the name preceded by a
12135 period and without any trailing data in brackets. Used for function
4d30c363
MM
12136 names. If we are configured for System V (or the embedded ABI) on
12137 the PowerPC, do not emit the period, since those systems do not use
12138 TOCs and the like. */
37409796 12139 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 12140
c4ad648e
AM
12141 /* Mark the decl as referenced so that cgraph will output the
12142 function. */
9bf6462a 12143 if (SYMBOL_REF_DECL (x))
c4ad648e 12144 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 12145
85b776df 12146 /* For macho, check to see if we need a stub. */
f9da97f0
AP
12147 if (TARGET_MACHO)
12148 {
12149 const char *name = XSTR (x, 0);
a031e781 12150#if TARGET_MACHO
3b48085e 12151 if (MACHOPIC_INDIRECT
11abc112
MM
12152 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
12153 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
12154#endif
12155 assemble_name (file, name);
12156 }
85b776df 12157 else if (!DOT_SYMBOLS)
9739c90c 12158 assemble_name (file, XSTR (x, 0));
85b776df
AM
12159 else
12160 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
12161 return;
12162
9854d9ed
RK
12163 case 'Z':
12164 /* Like 'L', for last word of TImode. */
12165 if (GET_CODE (x) == REG)
fb5c67a7 12166 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
12167 else if (GET_CODE (x) == MEM)
12168 {
12169 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12170 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12171 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
12172 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12173 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 12174 else
d7624dc0 12175 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 12176 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12177 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12178 reg_names[SMALL_DATA_REG]);
9854d9ed 12179 }
5c23c401 12180 return;
0ac081f6 12181
a3170dc6 12182 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
12183 case 'y':
12184 {
12185 rtx tmp;
12186
37409796 12187 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
12188
12189 tmp = XEXP (x, 0);
12190
90d3ff1c 12191 /* Ugly hack because %y is overloaded. */
8ef65e3d 12192 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
12193 && (GET_MODE_SIZE (GET_MODE (x)) == 8
12194 || GET_MODE (x) == TFmode
12195 || GET_MODE (x) == TImode))
a3170dc6
AH
12196 {
12197 /* Handle [reg]. */
12198 if (GET_CODE (tmp) == REG)
12199 {
12200 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
12201 break;
12202 }
12203 /* Handle [reg+UIMM]. */
12204 else if (GET_CODE (tmp) == PLUS &&
12205 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
12206 {
12207 int x;
12208
37409796 12209 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
12210
12211 x = INTVAL (XEXP (tmp, 1));
12212 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
12213 break;
12214 }
12215
12216 /* Fall through. Must be [reg+reg]. */
12217 }
850e8d3d
DN
12218 if (TARGET_ALTIVEC
12219 && GET_CODE (tmp) == AND
12220 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
12221 && INTVAL (XEXP (tmp, 1)) == -16)
12222 tmp = XEXP (tmp, 0);
0ac081f6 12223 if (GET_CODE (tmp) == REG)
c62f2db5 12224 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 12225 else
0ac081f6 12226 {
37409796 12227 gcc_assert (GET_CODE (tmp) == PLUS
9024f4b8
AM
12228 && REG_P (XEXP (tmp, 0))
12229 && REG_P (XEXP (tmp, 1)));
bb8df8a6 12230
0ac081f6
AH
12231 if (REGNO (XEXP (tmp, 0)) == 0)
12232 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
12233 reg_names[ REGNO (XEXP (tmp, 0)) ]);
12234 else
12235 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
12236 reg_names[ REGNO (XEXP (tmp, 1)) ]);
12237 }
0ac081f6
AH
12238 break;
12239 }
f676971a 12240
9878760c
RK
12241 case 0:
12242 if (GET_CODE (x) == REG)
12243 fprintf (file, "%s", reg_names[REGNO (x)]);
12244 else if (GET_CODE (x) == MEM)
12245 {
12246 /* We need to handle PRE_INC and PRE_DEC here, since we need to
12247 know the width from the mode. */
12248 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
12249 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
12250 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 12251 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
12252 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
12253 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
12254 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12255 output_address (XEXP (XEXP (x, 0), 1));
9878760c 12256 else
a54d04b7 12257 output_address (XEXP (x, 0));
9878760c
RK
12258 }
12259 else
a54d04b7 12260 output_addr_const (file, x);
a85d226b 12261 return;
9878760c 12262
c4501e62
JJ
12263 case '&':
12264 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12265 return;
12266
9878760c
RK
12267 default:
12268 output_operand_lossage ("invalid %%xn code");
12269 }
12270}
12271\f
12272/* Print the address of an operand. */
12273
12274void
a2369ed3 12275print_operand_address (FILE *file, rtx x)
9878760c
RK
12276{
12277 if (GET_CODE (x) == REG)
4697a36c 12278 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
12279 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12280 || GET_CODE (x) == LABEL_REF)
9878760c
RK
12281 {
12282 output_addr_const (file, x);
ba5e43aa 12283 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12284 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12285 reg_names[SMALL_DATA_REG]);
37409796
NS
12286 else
12287 gcc_assert (!TARGET_TOC);
9878760c
RK
12288 }
12289 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12290 {
9024f4b8 12291 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 12292 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
12293 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12294 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 12295 else
4697a36c
MM
12296 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12297 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
12298 }
12299 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
12300 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
12301 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
12302#if TARGET_ELF
12303 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12304 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
12305 {
12306 output_addr_const (file, XEXP (x, 1));
12307 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12308 }
c859cda6
DJ
12309#endif
12310#if TARGET_MACHO
12311 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12312 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
12313 {
12314 fprintf (file, "lo16(");
12315 output_addr_const (file, XEXP (x, 1));
12316 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12317 }
3cb999d8 12318#endif
4d588c14 12319 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 12320 {
2bfcf297 12321 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 12322 {
2bfcf297
DB
12323 rtx contains_minus = XEXP (x, 1);
12324 rtx minus, symref;
12325 const char *name;
f676971a 12326
9ebbca7d 12327 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 12328 turn it into (sym) for output_addr_const. */
9ebbca7d
GK
12329 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
12330 contains_minus = XEXP (contains_minus, 0);
12331
2bfcf297
DB
12332 minus = XEXP (contains_minus, 0);
12333 symref = XEXP (minus, 0);
12334 XEXP (contains_minus, 0) = symref;
12335 if (TARGET_ELF)
12336 {
12337 char *newname;
12338
12339 name = XSTR (symref, 0);
12340 newname = alloca (strlen (name) + sizeof ("@toc"));
12341 strcpy (newname, name);
12342 strcat (newname, "@toc");
12343 XSTR (symref, 0) = newname;
12344 }
12345 output_addr_const (file, XEXP (x, 1));
12346 if (TARGET_ELF)
12347 XSTR (symref, 0) = name;
9ebbca7d
GK
12348 XEXP (contains_minus, 0) = minus;
12349 }
12350 else
12351 output_addr_const (file, XEXP (x, 1));
12352
12353 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
12354 }
9878760c 12355 else
37409796 12356 gcc_unreachable ();
9878760c
RK
12357}
12358\f
88cad84b 12359/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
12360 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
12361 is defined. It also needs to handle DI-mode objects on 64-bit
12362 targets. */
12363
12364static bool
a2369ed3 12365rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 12366{
f4f4921e 12367#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 12368 /* Special handling for SI values. */
84dcde01 12369 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 12370 {
301d03af 12371 static int recurse = 0;
f676971a 12372
301d03af
RS
12373 /* For -mrelocatable, we mark all addresses that need to be fixed up
12374 in the .fixup section. */
12375 if (TARGET_RELOCATABLE
d6b5193b
RS
12376 && in_section != toc_section
12377 && in_section != text_section
4325ca90 12378 && !unlikely_text_section_p (in_section)
301d03af
RS
12379 && !recurse
12380 && GET_CODE (x) != CONST_INT
12381 && GET_CODE (x) != CONST_DOUBLE
12382 && CONSTANT_P (x))
12383 {
12384 char buf[256];
12385
12386 recurse = 1;
12387 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12388 fixuplabelno++;
12389 ASM_OUTPUT_LABEL (asm_out_file, buf);
12390 fprintf (asm_out_file, "\t.long\t(");
12391 output_addr_const (asm_out_file, x);
12392 fprintf (asm_out_file, ")@fixup\n");
12393 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12394 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12395 fprintf (asm_out_file, "\t.long\t");
12396 assemble_name (asm_out_file, buf);
12397 fprintf (asm_out_file, "\n\t.previous\n");
12398 recurse = 0;
12399 return true;
12400 }
12401 /* Remove initial .'s to turn a -mcall-aixdesc function
12402 address into the address of the descriptor, not the function
12403 itself. */
12404 else if (GET_CODE (x) == SYMBOL_REF
12405 && XSTR (x, 0)[0] == '.'
12406 && DEFAULT_ABI == ABI_AIX)
12407 {
12408 const char *name = XSTR (x, 0);
12409 while (*name == '.')
12410 name++;
12411
12412 fprintf (asm_out_file, "\t.long\t%s\n", name);
12413 return true;
12414 }
12415 }
f4f4921e 12416#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
12417 return default_assemble_integer (x, size, aligned_p);
12418}
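/* With -mrelocatable, the fixup path above emits roughly

	.LCPn:
		.long	(sym)@fixup
		.section	".fixup","aw"
		.align	2
		.long	.LCPn
		.previous

   so the .fixup section lists every word the loader must relocate at
   load time.  (".LCPn" stands for the internal label generated from
   fixuplabelno; the exact spelling depends on the target macros.)  */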
93638d7a
AM
12419
12420#ifdef HAVE_GAS_HIDDEN
12421/* Emit an assembler directive to set symbol visibility for DECL to
12422 VISIBILITY_TYPE. */
12423
5add3202 12424static void
a2369ed3 12425rs6000_assemble_visibility (tree decl, int vis)
93638d7a 12426{
93638d7a
AM
12427 /* Functions need to have their entry point symbol visibility set as
12428 well as their descriptor symbol visibility. */
85b776df
AM
12429 if (DEFAULT_ABI == ABI_AIX
12430 && DOT_SYMBOLS
12431 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 12432 {
25fdb4dc 12433 static const char * const visibility_types[] = {
c4ad648e 12434 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
12435 };
12436
12437 const char *name, *type;
93638d7a
AM
12438
12439 name = ((* targetm.strip_name_encoding)
12440 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 12441 type = visibility_types[vis];
93638d7a 12442
25fdb4dc
RH
12443 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12444 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 12445 }
25fdb4dc
RH
12446 else
12447 default_assemble_visibility (decl, vis);
93638d7a
AM
12448}
12449#endif
301d03af 12450\f
39a10a29 12451enum rtx_code
a2369ed3 12452rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
12453{
12454 /* Reversal of FP compares needs care -- an ordered compare
12455 becomes an unordered compare and vice versa. */
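 /* For example, GE reverses to UNLT rather than LT, so that a NaN
 operand still satisfies the reversed condition. */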
f676971a 12456 if (mode == CCFPmode
bc9ec0e0
GK
12457 && (!flag_finite_math_only
12458 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12459 || code == UNEQ || code == LTGT))
bab6226b 12460 return reverse_condition_maybe_unordered (code);
39a10a29 12461 else
bab6226b 12462 return reverse_condition (code);
39a10a29
GK
12463}
12464
39a10a29
GK
12465/* Generate a compare for CODE. Return a brand-new rtx that
12466 represents the result of the compare. */
a4f6c312 12467
39a10a29 12468static rtx
a2369ed3 12469rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
12470{
12471 enum machine_mode comp_mode;
12472 rtx compare_result;
12473
12474 if (rs6000_compare_fp_p)
12475 comp_mode = CCFPmode;
12476 else if (code == GTU || code == LTU
c4ad648e 12477 || code == GEU || code == LEU)
39a10a29 12478 comp_mode = CCUNSmode;
60934f9c
NS
12479 else if ((code == EQ || code == NE)
12480 && GET_CODE (rs6000_compare_op0) == SUBREG
12481 && GET_CODE (rs6000_compare_op1) == SUBREG
12482 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
12483 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
12484 /* These are unsigned values, perhaps there will be a later
12485 ordering compare that can be shared with this one.
12486 Unfortunately we cannot detect the signedness of the operands
12487 for non-subregs. */
12488 comp_mode = CCUNSmode;
39a10a29
GK
12489 else
12490 comp_mode = CCmode;
12491
12492 /* First, the compare. */
12493 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12494
cef6b86c 12495 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12496 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 12497 && rs6000_compare_fp_p)
a3170dc6 12498 {
64022b5d 12499 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
12500 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
12501
12502 if (op_mode == VOIDmode)
12503 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 12504
cef6b86c
EB
12505 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12506 This explains the following mess. */
423c1189 12507
a3170dc6
AH
12508 switch (code)
12509 {
423c1189 12510 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
12511 switch (op_mode)
12512 {
12513 case SFmode:
12514 cmp = flag_unsafe_math_optimizations
12515 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
12516 rs6000_compare_op1)
12517 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
12518 rs6000_compare_op1);
12519 break;
12520
12521 case DFmode:
12522 cmp = flag_unsafe_math_optimizations
12523 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
12524 rs6000_compare_op1)
12525 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
12526 rs6000_compare_op1);
12527 break;
12528
17caeff2
JM
12529 case TFmode:
12530 cmp = flag_unsafe_math_optimizations
12531 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
12532 rs6000_compare_op1)
12533 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
12534 rs6000_compare_op1);
12535 break;
12536
37409796
NS
12537 default:
12538 gcc_unreachable ();
12539 }
a3170dc6 12540 break;
bb8df8a6 12541
423c1189 12542 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
12543 switch (op_mode)
12544 {
12545 case SFmode:
12546 cmp = flag_unsafe_math_optimizations
12547 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
12548 rs6000_compare_op1)
12549 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
12550 rs6000_compare_op1);
12551 break;
bb8df8a6 12552
37409796
NS
12553 case DFmode:
12554 cmp = flag_unsafe_math_optimizations
12555 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
12556 rs6000_compare_op1)
12557 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
12558 rs6000_compare_op1);
12559 break;
12560
17caeff2
JM
12561 case TFmode:
12562 cmp = flag_unsafe_math_optimizations
12563 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
12564 rs6000_compare_op1)
12565 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
12566 rs6000_compare_op1);
12567 break;
12568
37409796
NS
12569 default:
12570 gcc_unreachable ();
12571 }
a3170dc6 12572 break;
bb8df8a6 12573
423c1189 12574 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
12575 switch (op_mode)
12576 {
12577 case SFmode:
12578 cmp = flag_unsafe_math_optimizations
12579 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
12580 rs6000_compare_op1)
12581 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
12582 rs6000_compare_op1);
12583 break;
bb8df8a6 12584
37409796
NS
12585 case DFmode:
12586 cmp = flag_unsafe_math_optimizations
12587 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
12588 rs6000_compare_op1)
12589 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
12590 rs6000_compare_op1);
12591 break;
12592
17caeff2
JM
12593 case TFmode:
12594 cmp = flag_unsafe_math_optimizations
12595 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
12596 rs6000_compare_op1)
12597 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
12598 rs6000_compare_op1);
12599 break;
12600
37409796
NS
12601 default:
12602 gcc_unreachable ();
12603 }
a3170dc6 12604 break;
4d4cbc0e 12605 default:
37409796 12606 gcc_unreachable ();
a3170dc6
AH
12607 }
12608
12609 /* Synthesize LE and GE from LT/GT || EQ. */
12610 if (code == LE || code == GE || code == LEU || code == GEU)
12611 {
a3170dc6
AH
12612 emit_insn (cmp);
12613
12614 switch (code)
12615 {
12616 case LE: code = LT; break;
12617 case GE: code = GT; break;
12618 case LEU: code = LT; break;
12619 case GEU: code = GT; break;
37409796 12620 default: gcc_unreachable ();
a3170dc6
AH
12621 }
12622
a3170dc6
AH
12623 compare_result2 = gen_reg_rtx (CCFPmode);
12624
12625 /* Do the EQ. */
37409796
NS
12626 switch (op_mode)
12627 {
12628 case SFmode:
12629 cmp = flag_unsafe_math_optimizations
12630 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
12631 rs6000_compare_op1)
12632 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
12633 rs6000_compare_op1);
12634 break;
12635
12636 case DFmode:
12637 cmp = flag_unsafe_math_optimizations
12638 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
12639 rs6000_compare_op1)
12640 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
12641 rs6000_compare_op1);
12642 break;
12643
17caeff2
JM
12644 case TFmode:
12645 cmp = flag_unsafe_math_optimizations
12646 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
12647 rs6000_compare_op1)
12648 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
12649 rs6000_compare_op1);
12650 break;
12651
37409796
NS
12652 default:
12653 gcc_unreachable ();
12654 }
a3170dc6
AH
12655 emit_insn (cmp);
12656
a3170dc6 12657 /* OR them together. */
64022b5d
AH
12658 or_result = gen_reg_rtx (CCFPmode);
12659 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12660 compare_result2);
a3170dc6
AH
12661 compare_result = or_result;
12662 code = EQ;
12663 }
12664 else
12665 {
a3170dc6 12666 if (code == NE || code == LTGT)
a3170dc6 12667 code = NE;
423c1189
AH
12668 else
12669 code = EQ;
a3170dc6
AH
12670 }
12671
12672 emit_insn (cmp);
12673 }
12674 else
de17c25f
DE
12675 {
12676 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12677 CLOBBERs to match cmptf_internal2 pattern. */
12678 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
12679 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 12680 && !TARGET_IEEEQUAD
de17c25f
DE
12681 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
12682 emit_insn (gen_rtx_PARALLEL (VOIDmode,
12683 gen_rtvec (9,
12684 gen_rtx_SET (VOIDmode,
12685 compare_result,
12686 gen_rtx_COMPARE (comp_mode,
12687 rs6000_compare_op0,
12688 rs6000_compare_op1)),
12689 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12690 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12691 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12692 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12693 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12694 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12695 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12696 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
12697 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
12698 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
12699 {
12700 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
12701 comp_mode = CCEQmode;
12702 compare_result = gen_reg_rtx (CCEQmode);
12703 if (TARGET_64BIT)
12704 emit_insn (gen_stack_protect_testdi (compare_result,
12705 rs6000_compare_op0, op1));
12706 else
12707 emit_insn (gen_stack_protect_testsi (compare_result,
12708 rs6000_compare_op0, op1));
12709 }
de17c25f
DE
12710 else
12711 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
12712 gen_rtx_COMPARE (comp_mode,
12713 rs6000_compare_op0,
12714 rs6000_compare_op1)));
12715 }
f676971a 12716
ca5adc63 12717 /* Some kinds of FP comparisons need an OR operation;
e7108df9 12718 under flag_finite_math_only we don't bother. */
39a10a29 12719 if (rs6000_compare_fp_p
e7108df9 12720 && !flag_finite_math_only
8ef65e3d 12721 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
12722 && (code == LE || code == GE
12723 || code == UNEQ || code == LTGT
12724 || code == UNGT || code == UNLT))
12725 {
12726 enum rtx_code or1, or2;
12727 rtx or1_rtx, or2_rtx, compare2_rtx;
12728 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 12729
39a10a29
GK
12730 switch (code)
12731 {
12732 case LE: or1 = LT; or2 = EQ; break;
12733 case GE: or1 = GT; or2 = EQ; break;
12734 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
12735 case LTGT: or1 = LT; or2 = GT; break;
12736 case UNGT: or1 = UNORDERED; or2 = GT; break;
12737 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 12738 default: gcc_unreachable ();
39a10a29
GK
12739 }
12740 validate_condition_mode (or1, comp_mode);
12741 validate_condition_mode (or2, comp_mode);
1c563bed
KH
12742 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
12743 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
12744 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
12745 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
12746 const_true_rtx);
12747 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
12748
12749 compare_result = or_result;
12750 code = EQ;
12751 }
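 /* For instance, a floating-point LE compare is split into its LT and EQ
 condition bits, which are combined (a cror) into a CCEQmode register
 that is then tested with a single EQ. */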
12752
12753 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 12754
1c563bed 12755 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
12756}
12757
12758
12759/* Emit the RTL for an sCOND pattern. */
12760
12761void
a2369ed3 12762rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
12763{
12764 rtx condition_rtx;
12765 enum machine_mode op_mode;
b7053a3f 12766 enum rtx_code cond_code;
39a10a29
GK
12767
12768 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
12769 cond_code = GET_CODE (condition_rtx);
12770
8ef65e3d 12771 if (rs6000_compare_fp_p
423c1189
AH
12772 && !TARGET_FPRS && TARGET_HARD_FLOAT)
12773 {
12774 rtx t;
12775
12776 PUT_MODE (condition_rtx, SImode);
12777 t = XEXP (condition_rtx, 0);
12778
37409796 12779 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
12780
12781 if (cond_code == NE)
64022b5d 12782 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 12783
64022b5d 12784 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
12785 return;
12786 }
12787
b7053a3f
GK
12788 if (cond_code == NE
12789 || cond_code == GE || cond_code == LE
12790 || cond_code == GEU || cond_code == LEU
12791 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
12792 {
12793 rtx not_result = gen_reg_rtx (CCEQmode);
12794 rtx not_op, rev_cond_rtx;
12795 enum machine_mode cc_mode;
f676971a 12796
b7053a3f
GK
12797 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
12798
1c563bed 12799 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 12800 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
12801 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
12802 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
12803 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
12804 }
39a10a29
GK
12805
12806 op_mode = GET_MODE (rs6000_compare_op0);
12807 if (op_mode == VOIDmode)
12808 op_mode = GET_MODE (rs6000_compare_op1);
12809
12810 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
12811 {
12812 PUT_MODE (condition_rtx, DImode);
12813 convert_move (result, condition_rtx, 0);
12814 }
12815 else
12816 {
12817 PUT_MODE (condition_rtx, SImode);
12818 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
12819 }
12820}
12821
39a10a29
GK
12822/* Emit a branch of kind CODE to location LOC. */
12823
12824void
a2369ed3 12825rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
12826{
12827 rtx condition_rtx, loc_ref;
12828
12829 condition_rtx = rs6000_generate_compare (code);
12830 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
12831 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
12832 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
12833 loc_ref, pc_rtx)));
12834}
12835
12a4e8c5
GK
12836/* Return the string to output a conditional branch to LABEL, which is
12837 the assembler text for the label operand, or NULL if the branch is
f676971a 12838 really a conditional return.
12a4e8c5
GK
12839
12840 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
12841 condition code register and its mode specifies what kind of
12842 comparison we made.
12843
a0ab749a 12844 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
12845
12846 INSN is the insn. */
12847
12848char *
a2369ed3 12849output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
12850{
12851 static char string[64];
12852 enum rtx_code code = GET_CODE (op);
12853 rtx cc_reg = XEXP (op, 0);
12854 enum machine_mode mode = GET_MODE (cc_reg);
12855 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 12856 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
12857 int really_reversed = reversed ^ need_longbranch;
12858 char *s = string;
12859 const char *ccode;
12860 const char *pred;
12861 rtx note;
12862
39a10a29
GK
12863 validate_condition_mode (code, mode);
12864
12865 /* Work out which way this really branches. We could use
12866 reverse_condition_maybe_unordered here always but this
12867 makes the resulting assembler clearer. */
12a4e8c5 12868 if (really_reversed)
de40e1df
DJ
12869 {
12870 /* Reversal of FP compares needs care -- an ordered compare
12871 becomes an unordered compare and vice versa. */
12872 if (mode == CCFPmode)
12873 code = reverse_condition_maybe_unordered (code);
12874 else
12875 code = reverse_condition (code);
12876 }
12a4e8c5 12877
8ef65e3d 12878 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
12879 {
12880 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
12881 to the GT bit. */
37409796
NS
12882 switch (code)
12883 {
12884 case EQ:
12885 /* Opposite of GT. */
12886 code = GT;
12887 break;
12888
12889 case NE:
12890 code = UNLE;
12891 break;
12892
12893 default:
12894 gcc_unreachable ();
12895 }
a3170dc6
AH
12896 }
12897
39a10a29 12898 switch (code)
12a4e8c5
GK
12899 {
12900 /* Not all of these are actually distinct opcodes, but
12901 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
12902 case NE: case LTGT:
12903 ccode = "ne"; break;
12904 case EQ: case UNEQ:
12905 ccode = "eq"; break;
f676971a 12906 case GE: case GEU:
50a0b056 12907 ccode = "ge"; break;
f676971a 12908 case GT: case GTU: case UNGT:
50a0b056 12909 ccode = "gt"; break;
f676971a 12910 case LE: case LEU:
50a0b056 12911 ccode = "le"; break;
f676971a 12912 case LT: case LTU: case UNLT:
50a0b056 12913 ccode = "lt"; break;
12a4e8c5
GK
12914 case UNORDERED: ccode = "un"; break;
12915 case ORDERED: ccode = "nu"; break;
12916 case UNGE: ccode = "nl"; break;
12917 case UNLE: ccode = "ng"; break;
12918 default:
37409796 12919 gcc_unreachable ();
12a4e8c5 12920 }
f676971a
EC
12921
12922 /* Maybe we have a guess as to how likely the branch is.
94a54f47 12923 The old mnemonics don't have a way to specify this information. */
f4857b9b 12924 pred = "";
12a4e8c5
GK
12925 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
12926 if (note != NULL_RTX)
12927 {
12928 /* PROB is the difference from 50%. */
12929 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
12930
12931 /* Only hint for highly probable/improbable branches on newer
12932 cpus as static prediction overrides processor dynamic
12933 prediction. For older cpus we may as well always hint, but
12934 assume not taken for branches that are very close to 50% as a
12935 mispredicted taken branch is more expensive than a
f676971a 12936 mispredicted not-taken branch. */
ec507f2d 12937 if (rs6000_always_hint
2c9e13f3
JH
12938 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
12939 && br_prob_note_reliable_p (note)))
f4857b9b
AM
12940 {
12941 if (abs (prob) > REG_BR_PROB_BASE / 20
12942 && ((prob > 0) ^ need_longbranch))
c4ad648e 12943 pred = "+";
f4857b9b
AM
12944 else
12945 pred = "-";
12946 }
12a4e8c5 12947 }
12a4e8c5
GK
12948
12949 if (label == NULL)
94a54f47 12950 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 12951 else
94a54f47 12952 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 12953
37c67319 12954 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 12955 Assume they'd only be the first character.... */
37c67319
GK
12956 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
12957 *s++ = '%';
94a54f47 12958 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
12959
12960 if (label != NULL)
12961 {
12962 /* If the branch distance was too far, we may have to use an
12963 unconditional branch to go the distance. */
12964 if (need_longbranch)
44518ddd 12965 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
12966 else
12967 s += sprintf (s, ",%s", label);
12968 }
12969
12970 return string;
12971}
50a0b056 12972
64022b5d 12973/* Return the string to flip the GT bit on a CR. */
423c1189 12974char *
64022b5d 12975output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
12976{
12977 static char string[64];
12978 int a, b;
12979
37409796
NS
12980 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
12981 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 12982
64022b5d
AH
12983 /* GT bit. */
12984 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
12985 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
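 /* E.g. flipping the GT bit of cr6 in place gives a = b = 4*6 + 1 = 25,
 so the returned template is "crnot 25,25". */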
423c1189
AH
12986
12987 sprintf (string, "crnot %d,%d", a, b);
12988 return string;
12989}
12990
21213b4c
DP
12991/* Return the UNSPEC code of the vector compare instruction for the given
12992 CODE, DEST_MODE and OP_MODE. Return INSN_NOT_AVAILABLE if no such
12993 instruction is available. */
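/* Only EQ, GT and GTU (plus GE on V4SF operands) map directly onto AltiVec
   compare instructions: e.g. (EQ, V4SImode, V4SImode) yields
   UNSPEC_VCMPEQUW. Everything else is synthesized from these by
   rs6000_emit_vector_compare below. */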
12994
12995static int
94ff898d 12996get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
12997 enum machine_mode dest_mode,
12998 enum machine_mode op_mode)
12999{
13000 if (!TARGET_ALTIVEC)
13001 return INSN_NOT_AVAILABLE;
13002
13003 switch (code)
13004 {
13005 case EQ:
13006 if (dest_mode == V16QImode && op_mode == V16QImode)
13007 return UNSPEC_VCMPEQUB;
13008 if (dest_mode == V8HImode && op_mode == V8HImode)
13009 return UNSPEC_VCMPEQUH;
13010 if (dest_mode == V4SImode && op_mode == V4SImode)
13011 return UNSPEC_VCMPEQUW;
13012 if (dest_mode == V4SImode && op_mode == V4SFmode)
13013 return UNSPEC_VCMPEQFP;
13014 break;
13015 case GE:
13016 if (dest_mode == V4SImode && op_mode == V4SFmode)
13017 return UNSPEC_VCMPGEFP;
 /* Integer GE has no AltiVec instruction; fall back via GT IOR EQ. */
 break;
13018 case GT:
13019 if (dest_mode == V16QImode && op_mode == V16QImode)
13020 return UNSPEC_VCMPGTSB;
13021 if (dest_mode == V8HImode && op_mode == V8HImode)
13022 return UNSPEC_VCMPGTSH;
13023 if (dest_mode == V4SImode && op_mode == V4SImode)
13024 return UNSPEC_VCMPGTSW;
13025 if (dest_mode == V4SImode && op_mode == V4SFmode)
13026 return UNSPEC_VCMPGTFP;
13027 break;
13028 case GTU:
13029 if (dest_mode == V16QImode && op_mode == V16QImode)
13030 return UNSPEC_VCMPGTUB;
13031 if (dest_mode == V8HImode && op_mode == V8HImode)
13032 return UNSPEC_VCMPGTUH;
13033 if (dest_mode == V4SImode && op_mode == V4SImode)
13034 return UNSPEC_VCMPGTUW;
13035 break;
13036 default:
13037 break;
13038 }
13039 return INSN_NOT_AVAILABLE;
13040}
13041
13042/* Emit vector compare for operands OP0 and OP1 using code RCODE.
13043 DMODE is expected destination mode. This is a recursive function. */
13044
13045static rtx
13046rs6000_emit_vector_compare (enum rtx_code rcode,
13047 rtx op0, rtx op1,
13048 enum machine_mode dmode)
13049{
13050 int vec_cmp_insn;
13051 rtx mask;
13052 enum machine_mode dest_mode;
13053 enum machine_mode op_mode = GET_MODE (op1);
13054
37409796
NS
13055 gcc_assert (TARGET_ALTIVEC);
13056 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
13057
13058 /* Floating-point vector compare instructions use a V4SImode destination.
13059 Move the result to the appropriate mode later. */
13060 if (dmode == V4SFmode)
13061 dest_mode = V4SImode;
13062 else
13063 dest_mode = dmode;
13064
13065 mask = gen_reg_rtx (dest_mode);
13066 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
13067
13068 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
13069 {
13070 bool swap_operands = false;
13071 bool try_again = false;
13072 switch (rcode)
13073 {
13074 case LT:
13075 rcode = GT;
13076 swap_operands = true;
13077 try_again = true;
13078 break;
13079 case LTU:
13080 rcode = GTU;
13081 swap_operands = true;
13082 try_again = true;
13083 break;
13084 case NE:
370df7db
JC
13085 case UNLE:
13086 case UNLT:
13087 case UNGE:
13088 case UNGT:
13089 /* Invert condition and try again.
13090 e.g., A != B becomes ~(A==B). */
21213b4c 13091 {
370df7db 13092 enum rtx_code rev_code;
21213b4c 13093 enum insn_code nor_code;
d1123cde 13094 rtx eq_rtx;
370df7db
JC
13095
13096 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
13097 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
13098 dest_mode);
94ff898d 13099
166cdb08 13100 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 13101 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
13102 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
13103
13104 if (dmode != dest_mode)
13105 {
13106 rtx temp = gen_reg_rtx (dest_mode);
13107 convert_move (temp, mask, 0);
13108 return temp;
13109 }
13110 return mask;
13111 }
13112 break;
13113 case GE:
13114 case GEU:
13115 case LE:
13116 case LEU:
13117 /* Try GT/GTU/LT/LTU OR EQ */
13118 {
13119 rtx c_rtx, eq_rtx;
13120 enum insn_code ior_code;
13121 enum rtx_code new_code;
13122
37409796
NS
13123 switch (rcode)
13124 {
13125 case GE:
13126 new_code = GT;
13127 break;
13128
13129 case GEU:
13130 new_code = GTU;
13131 break;
13132
13133 case LE:
13134 new_code = LT;
13135 break;
13136
13137 case LEU:
13138 new_code = LTU;
13139 break;
13140
13141 default:
13142 gcc_unreachable ();
13143 }
21213b4c
DP
13144
13145 c_rtx = rs6000_emit_vector_compare (new_code,
13146 op0, op1, dest_mode);
13147 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
13148 dest_mode);
13149
166cdb08 13150 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 13151 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
13152 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
13153 if (dmode != dest_mode)
13154 {
13155 rtx temp = gen_reg_rtx (dest_mode);
13156 convert_move (temp, mask, 0);
13157 return temp;
13158 }
13159 return mask;
13160 }
13161 break;
13162 default:
37409796 13163 gcc_unreachable ();
21213b4c
DP
13164 }
13165
13166 if (try_again)
13167 {
13168 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
13169 /* You only get two chances. */
13170 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
13171 }
13172
13173 if (swap_operands)
13174 {
13175 rtx tmp;
13176 tmp = op0;
13177 op0 = op1;
13178 op1 = tmp;
13179 }
13180 }
13181
915167f5
GK
13182 emit_insn (gen_rtx_SET (VOIDmode, mask,
13183 gen_rtx_UNSPEC (dest_mode,
13184 gen_rtvec (2, op0, op1),
13185 vec_cmp_insn)));
21213b4c
DP
13186 if (dmode != dest_mode)
13187 {
13188 rtx temp = gen_reg_rtx (dest_mode);
13189 convert_move (temp, mask, 0);
13190 return temp;
13191 }
13192 return mask;
13193}
13194
13195/* Return the vector select instruction for MODE. Return INSN_NOT_AVAILABLE
13196 if no valid insn exists for the given mode. */
13197
13198static int
13199get_vsel_insn (enum machine_mode mode)
13200{
13201 switch (mode)
13202 {
13203 case V4SImode:
13204 return UNSPEC_VSEL4SI;
13205 break;
13206 case V4SFmode:
13207 return UNSPEC_VSEL4SF;
13208 break;
13209 case V8HImode:
13210 return UNSPEC_VSEL8HI;
13211 break;
13212 case V16QImode:
13213 return UNSPEC_VSEL16QI;
13214 break;
13215 default:
13216 return INSN_NOT_AVAILABLE;
13217 break;
13218 }
13219 return INSN_NOT_AVAILABLE;
13220}
13221
13222/* Emit vector select insn where DEST is destination using
13223 operands OP1, OP2 and MASK. */
13224
13225static void
13226rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
13227{
13228 rtx t, temp;
13229 enum machine_mode dest_mode = GET_MODE (dest);
13230 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
13231
13232 temp = gen_reg_rtx (dest_mode);
94ff898d 13233
bb8df8a6 13234 /* For each vector element, select op1 when mask is 1 otherwise
19f1ebc7 13235 select op2. */
915167f5
GK
13236 t = gen_rtx_SET (VOIDmode, temp,
13237 gen_rtx_UNSPEC (dest_mode,
13238 gen_rtvec (3, op2, op1, mask),
13239 vsel_insn_index));
21213b4c
DP
13240 emit_insn (t);
13241 emit_move_insn (dest, temp);
13242 return;
13243}
13244
94ff898d 13245/* Emit vector conditional expression.
21213b4c
DP
13246 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
13247 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
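/* For example, for a V4SImode dest = (cc_op0 < cc_op1) ? op1 : op2, the
   lane mask is built with vcmpgtsw on the swapped operands and the two
   arms are then merged with vsel. */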
13248
13249int
13250rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
13251 rtx cond, rtx cc_op0, rtx cc_op1)
13252{
13253 enum machine_mode dest_mode = GET_MODE (dest);
13254 enum rtx_code rcode = GET_CODE (cond);
13255 rtx mask;
13256
13257 if (!TARGET_ALTIVEC)
13258 return 0;
13259
13260 /* Get the vector mask for the given relational operations. */
13261 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
13262
13263 rs6000_emit_vector_select (dest, op1, op2, mask);
13264
13265 return 1;
13266}
13267
50a0b056
GK
13268/* Emit a conditional move: move TRUE_COND to DEST if OP applied to the
13269 operands of the last comparison is nonzero/true, FALSE_COND if it
13270 is zero/false. Return 0 if the hardware has no such operation. */
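/* The fsel path below reduces every comparison to a GE against zero:
   e.g. "a <= b ? x : y" becomes a test of (b - a) >= 0.0, which maps
   onto a single fsel. */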
a4f6c312 13271
50a0b056 13272int
a2369ed3 13273rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
13274{
13275 enum rtx_code code = GET_CODE (op);
13276 rtx op0 = rs6000_compare_op0;
13277 rtx op1 = rs6000_compare_op1;
13278 REAL_VALUE_TYPE c1;
3148ad6d
DJ
13279 enum machine_mode compare_mode = GET_MODE (op0);
13280 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 13281 rtx temp;
add2402e 13282 bool is_against_zero;
50a0b056 13283
a3c9585f 13284 /* These modes should always match. */
a3170dc6
AH
13285 if (GET_MODE (op1) != compare_mode
13286 /* In the isel case however, we can use a compare immediate, so
13287 op1 may be a small constant. */
13288 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 13289 return 0;
178c3eff 13290 if (GET_MODE (true_cond) != result_mode)
3148ad6d 13291 return 0;
178c3eff 13292 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
13293 return 0;
13294
50a0b056 13295 /* First, work out if the hardware can do this at all, or
a3c9585f 13296 if it's too slow.... */
50a0b056 13297 if (! rs6000_compare_fp_p)
a3170dc6
AH
13298 {
13299 if (TARGET_ISEL)
13300 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13301 return 0;
13302 }
8ef65e3d 13303 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 13304 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 13305 return 0;
50a0b056 13306
add2402e 13307 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 13308
add2402e
GK
13309 /* A floating-point subtract might overflow, underflow, or produce
13310 an inexact result, thus changing the floating-point flags, so it
13311 can't be generated if we care about that. It's safe if one side
13312 of the construct is zero, since then no subtract will be
13313 generated. */
ebb109ad 13314 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
13315 && flag_trapping_math && ! is_against_zero)
13316 return 0;
13317
50a0b056
GK
13318 /* Eliminate half of the comparisons by switching operands, this
13319 makes the remaining code simpler. */
13320 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 13321 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
13322 {
13323 code = reverse_condition_maybe_unordered (code);
13324 temp = true_cond;
13325 true_cond = false_cond;
13326 false_cond = temp;
13327 }
13328
13329 /* UNEQ and LTGT take four instructions for a comparison with zero,
13330 it'll probably be faster to use a branch here too. */
bc9ec0e0 13331 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 13332 return 0;
f676971a 13333
50a0b056
GK
13334 if (GET_CODE (op1) == CONST_DOUBLE)
13335 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 13336
b6d08ca1 13337 /* We're going to try to implement comparisons by performing
50a0b056
GK
13338 a subtract, then comparing against zero. Unfortunately,
13339 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 13340 know that the operand is finite and the comparison
50a0b056 13341 would treat EQ different to UNORDERED, we can't do it. */
bc9ec0e0 13342 if (HONOR_INFINITIES (compare_mode)
50a0b056 13343 && code != GT && code != UNGE
045572c7 13344 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
13345 /* Constructs of the form (a OP b ? a : b) are safe. */
13346 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 13347 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
13348 && ! rtx_equal_p (op1, true_cond))))
13349 return 0;
add2402e 13350
50a0b056
GK
13351 /* At this point we know we can use fsel. */
13352
13353 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
13354 if (! is_against_zero)
13355 {
13356 temp = gen_reg_rtx (compare_mode);
13357 emit_insn (gen_rtx_SET (VOIDmode, temp,
13358 gen_rtx_MINUS (compare_mode, op0, op1)));
13359 op0 = temp;
13360 op1 = CONST0_RTX (compare_mode);
13361 }
50a0b056
GK
13362
13363 /* If we don't care about NaNs we can reduce some of the comparisons
13364 down to faster ones. */
bc9ec0e0 13365 if (! HONOR_NANS (compare_mode))
50a0b056
GK
13366 switch (code)
13367 {
13368 case GT:
13369 code = LE;
13370 temp = true_cond;
13371 true_cond = false_cond;
13372 false_cond = temp;
13373 break;
13374 case UNGE:
13375 code = GE;
13376 break;
13377 case UNEQ:
13378 code = EQ;
13379 break;
13380 default:
13381 break;
13382 }
13383
13384 /* Now, reduce everything down to a GE. */
13385 switch (code)
13386 {
13387 case GE:
13388 break;
13389
13390 case LE:
3148ad6d
DJ
13391 temp = gen_reg_rtx (compare_mode);
13392 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13393 op0 = temp;
13394 break;
13395
13396 case ORDERED:
3148ad6d
DJ
13397 temp = gen_reg_rtx (compare_mode);
13398 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
13399 op0 = temp;
13400 break;
13401
13402 case EQ:
3148ad6d 13403 temp = gen_reg_rtx (compare_mode);
f676971a 13404 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
13405 gen_rtx_NEG (compare_mode,
13406 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
13407 op0 = temp;
13408 break;
13409
13410 case UNGE:
bc9ec0e0 13411 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 13412 temp = gen_reg_rtx (result_mode);
50a0b056 13413 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 13414 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13415 gen_rtx_GE (VOIDmode,
13416 op0, op1),
13417 true_cond, false_cond)));
bc9ec0e0
GK
13418 false_cond = true_cond;
13419 true_cond = temp;
50a0b056 13420
3148ad6d
DJ
13421 temp = gen_reg_rtx (compare_mode);
13422 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13423 op0 = temp;
13424 break;
13425
13426 case GT:
bc9ec0e0 13427 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 13428 temp = gen_reg_rtx (result_mode);
50a0b056 13429 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 13430 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13431 gen_rtx_GE (VOIDmode,
13432 op0, op1),
13433 true_cond, false_cond)));
bc9ec0e0
GK
13434 true_cond = false_cond;
13435 false_cond = temp;
50a0b056 13436
3148ad6d
DJ
13437 temp = gen_reg_rtx (compare_mode);
13438 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13439 op0 = temp;
13440 break;
13441
13442 default:
37409796 13443 gcc_unreachable ();
50a0b056
GK
13444 }
13445
13446 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 13447 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13448 gen_rtx_GE (VOIDmode,
13449 op0, op1),
13450 true_cond, false_cond)));
13451 return 1;
13452}
13453
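/* A minimal, self-contained sketch (not part of the port) of the
   identities the fsel expansion above relies on.  fsel is modeled as
   "a >= 0.0 ? b : c", which also selects C when A is a NaN, matching
   the hardware's handling of unordered operands.  */

#include <math.h>

static double
sketch_fsel (double a, double b, double c)
{
  return a >= 0.0 ? b : c;	/* NaN compares false, so C is chosen.  */
}

static double
sketch_le_zero (double a, double t, double f)
{
  /* The LE case above: a <= 0 ? t : f  becomes  -a >= 0 ? t : f.  */
  return sketch_fsel (-a, t, f);
}

static double
sketch_eq_zero (double a, double t, double f)
{
  /* The EQ case above: a == 0 ? t : f  becomes  -fabs (a) >= 0 ? t : f.  */
  return sketch_fsel (-fabs (a), t, f);
}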
a3170dc6
AH
13454/* Same as above, but for ints (isel). */
13455
13456static int
a2369ed3 13457rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
13458{
13459 rtx condition_rtx, cr;
13460
13461  /* All isel implementations thus far are 32-bit.  */
13462 if (GET_MODE (rs6000_compare_op0) != SImode)
13463 return 0;
13464
13465 /* We still have to do the compare, because isel doesn't do a
13466     compare; it just looks at the CRx bits set by a previous compare
13467 instruction. */
13468 condition_rtx = rs6000_generate_compare (GET_CODE (op));
13469 cr = XEXP (condition_rtx, 0);
13470
13471 if (GET_MODE (cr) == CCmode)
13472 emit_insn (gen_isel_signed (dest, condition_rtx,
13473 true_cond, false_cond, cr));
13474 else
13475 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13476 true_cond, false_cond, cr));
13477
13478 return 1;
13479}
13480
13481const char *
a2369ed3 13482output_isel (rtx *operands)
a3170dc6
AH
13483{
13484 enum rtx_code code;
13485
13486 code = GET_CODE (operands[1]);
13487 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13488 {
13489 PUT_CODE (operands[1], reverse_condition (code));
13490 return "isel %0,%3,%2,%j1";
13491 }
13492 else
13493 return "isel %0,%2,%3,%j1";
13494}
13495
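/* A minimal sketch (not part of the port) of what isel computes:
   rD = CR-bit ? rA : rB.  A CR field provides LT, GT and EQ bits (plus
   SO), so for codes such as GE, LE or NE output_isel above reverses the
   condition and swaps the two source operands instead.  */

static int
sketch_isel (int cr_bit, int a, int b)
{
  return cr_bit ? a : b;
}

/* E.g. "x >= y ? a : b" can be emitted as sketch_isel (x < y, b, a).  */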
50a0b056 13496void
a2369ed3 13497rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13498{
13499 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13500 enum rtx_code c;
50a0b056 13501 rtx target;
5dc8d536
AH
13502
13503 if (code == SMAX || code == SMIN)
13504 c = GE;
13505 else
13506 c = GEU;
13507
50a0b056 13508 if (code == SMAX || code == UMAX)
f676971a 13509 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13510 op0, op1, mode, 0);
13511 else
f676971a 13512 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13513 op1, op0, mode, 0);
37409796 13514 gcc_assert (target);
50a0b056
GK
13515 if (target != dest)
13516 emit_move_insn (dest, target);
13517}
46c07df8 13518
915167f5
GK
13519/* Emit instructions to perform a load-reserved/store-conditional operation.
13520 The operation performed is an atomic
13521 (set M (CODE:MODE M OP))
13522 If not NULL, BEFORE is atomically set to M before the operation, and
13523 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13524 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13525 Either OP or M may be wrapped in a NOT operation. */
13526
13527void
13528rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13529 rtx m, rtx op, rtx before_param, rtx after_param,
13530 bool sync_p)
13531{
13532 enum machine_mode used_mode;
13533 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13534 rtx used_m;
13535 rtvec vec;
13536 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13537 rtx shift = NULL_RTX;
bb8df8a6 13538
915167f5
GK
13539 if (sync_p)
13540 emit_insn (gen_memory_barrier ());
bb8df8a6 13541
915167f5
GK
13542 if (GET_CODE (m) == NOT)
13543 used_m = XEXP (m, 0);
13544 else
13545 used_m = m;
13546
13547 /* If this is smaller than SImode, we'll have to use SImode with
13548 adjustments. */
13549 if (mode == QImode || mode == HImode)
13550 {
13551 rtx newop, oldop;
13552
13553 if (MEM_ALIGN (used_m) >= 32)
13554 {
13555 int ishift = 0;
13556 if (BYTES_BIG_ENDIAN)
13557 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13558
915167f5 13559 shift = GEN_INT (ishift);
c75c6d11 13560 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13561 }
13562 else
13563 {
13564 rtx addrSI, aligned_addr;
a9c9d3fa 13565 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13566
c75c6d11
JJ
13567 addrSI = gen_lowpart_common (SImode,
13568 force_reg (Pmode, XEXP (used_m, 0)));
13569 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13570 shift = gen_reg_rtx (SImode);
13571
13572 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13573 GEN_INT (shift_mask)));
13574 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
13575
13576 aligned_addr = expand_binop (Pmode, and_optab,
13577 XEXP (used_m, 0),
13578 GEN_INT (-4), NULL_RTX,
13579 1, OPTAB_LIB_WIDEN);
13580 used_m = change_address (used_m, SImode, aligned_addr);
13581 set_mem_align (used_m, 32);
915167f5 13582 }
c75c6d11
JJ
13583 /* It's safe to keep the old alias set of USED_M, because
13584 the operation is atomic and only affects the original
13585 USED_M. */
13586 if (GET_CODE (m) == NOT)
13587 m = gen_rtx_NOT (SImode, used_m);
13588 else
13589 m = used_m;
915167f5
GK
13590
13591 if (GET_CODE (op) == NOT)
13592 {
13593 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13594 oldop = gen_rtx_NOT (SImode, oldop);
13595 }
13596 else
13597 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13598
915167f5
GK
13599 switch (code)
13600 {
13601 case IOR:
13602 case XOR:
13603 newop = expand_binop (SImode, and_optab,
13604 oldop, GEN_INT (imask), NULL_RTX,
13605 1, OPTAB_LIB_WIDEN);
13606 emit_insn (gen_ashlsi3 (newop, newop, shift));
13607 break;
13608
13609 case AND:
13610 newop = expand_binop (SImode, ior_optab,
13611 oldop, GEN_INT (~imask), NULL_RTX,
13612 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13613 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13614 break;
13615
13616 case PLUS:
9f0076e5 13617 case MINUS:
915167f5
GK
13618 {
13619 rtx mask;
bb8df8a6 13620
915167f5
GK
13621 newop = expand_binop (SImode, and_optab,
13622 oldop, GEN_INT (imask), NULL_RTX,
13623 1, OPTAB_LIB_WIDEN);
13624 emit_insn (gen_ashlsi3 (newop, newop, shift));
13625
13626 mask = gen_reg_rtx (SImode);
13627 emit_move_insn (mask, GEN_INT (imask));
13628 emit_insn (gen_ashlsi3 (mask, mask, shift));
13629
9f0076e5
DE
13630 if (code == PLUS)
13631 newop = gen_rtx_PLUS (SImode, m, newop);
13632 else
13633 newop = gen_rtx_MINUS (SImode, m, newop);
13634 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13635 newop = gen_rtx_IOR (SImode, newop,
13636 gen_rtx_AND (SImode,
13637 gen_rtx_NOT (SImode, mask),
13638 m));
13639 break;
13640 }
13641
13642 default:
13643 gcc_unreachable ();
13644 }
13645
a9c9d3fa
GK
13646 if (GET_CODE (m) == NOT)
13647 {
13648 rtx mask, xorm;
13649
13650 mask = gen_reg_rtx (SImode);
13651 emit_move_insn (mask, GEN_INT (imask));
13652 emit_insn (gen_ashlsi3 (mask, mask, shift));
13653
13654 xorm = gen_rtx_XOR (SImode, used_m, mask);
13655 /* Depending on the value of 'op', the XOR or the operation might
13656	     be simplified away.  */
13657 newop = simplify_gen_binary (code, SImode, xorm, newop);
13658 }
915167f5
GK
13659 op = newop;
13660 used_mode = SImode;
13661 before = gen_reg_rtx (used_mode);
13662 after = gen_reg_rtx (used_mode);
13663 }
13664 else
13665 {
13666 used_mode = mode;
13667 before = before_param;
13668 after = after_param;
13669
13670 if (before == NULL_RTX)
13671 before = gen_reg_rtx (used_mode);
13672 if (after == NULL_RTX)
13673 after = gen_reg_rtx (used_mode);
13674 }
bb8df8a6 13675
9f0076e5
DE
13676 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
13677 && used_mode != mode)
915167f5
GK
13678 the_op = op; /* Computed above. */
13679 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13680 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
13681 else
13682 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13683
13684 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13685 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13686 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
13687 gen_rtx_UNSPEC (used_mode,
13688 gen_rtvec (1, the_op),
13689 UNSPEC_SYNC_OP));
915167f5
GK
13690 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
13691
9f0076e5 13692 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
13693 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
13694 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
13695 else
13696 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
13697 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
13698
13699 /* Shift and mask the return values properly. */
13700 if (used_mode != mode && before_param)
13701 {
13702 emit_insn (gen_lshrsi3 (before, before, shift));
13703 convert_move (before_param, before, 1);
13704 }
13705
13706 if (used_mode != mode && after_param)
13707 {
13708 emit_insn (gen_lshrsi3 (after, after, shift));
13709 convert_move (after_param, after, 1);
13710 }
13711
13712 /* The previous sequence will end with a branch that's dependent on
13713 the conditional store, so placing an isync will ensure that no
13714 other instructions (especially, no load or store instructions)
13715 can start before the atomic operation completes. */
13716 if (sync_p)
13717 emit_insn (gen_isync ());
13718}
13719
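/* A minimal, self-contained sketch (not part of the port) of the subword
   arithmetic used above for QImode/HImode atomics: the byte or halfword is
   updated inside its naturally aligned 32-bit word, which requires the
   aligned word address, the shift that positions the subword, and a mask.
   The shift mirrors the big-endian rlwinm/xor computation above.  */

#include <stdint.h>

struct subword_slot
{
  uintptr_t word_addr;		/* address rounded down to 4 bytes */
  unsigned int shift;		/* bit position of the subword */
  uint32_t mask;		/* subword mask, already shifted */
};

static struct subword_slot
sketch_subword_slot (uintptr_t byte_addr, int is_byte)
{
  struct subword_slot s;
  unsigned int shift_mask = is_byte ? 0x18 : 0x10;

  s.word_addr = byte_addr & ~(uintptr_t) 3;
  s.shift = ((unsigned int) (byte_addr << 3) & shift_mask) ^ shift_mask;
  s.mask = (is_byte ? 0xffu : 0xffffu) << s.shift;
  return s;
}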
b52110d4
DE
13720/* A subroutine of the atomic operation splitters. Jump to LABEL if
13721 COND is true. Mark the jump as unlikely to be taken. */
13722
13723static void
13724emit_unlikely_jump (rtx cond, rtx label)
13725{
13726 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
13727 rtx x;
13728
13729 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
13730 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
13731 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
13732}
13733
13734/* A subroutine of the atomic operation splitters. Emit a load-locked
13735 instruction in MODE. */
13736
13737static void
13738emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
13739{
13740 rtx (*fn) (rtx, rtx) = NULL;
13741 if (mode == SImode)
13742 fn = gen_load_locked_si;
13743 else if (mode == DImode)
13744 fn = gen_load_locked_di;
13745 emit_insn (fn (reg, mem));
13746}
13747
13748/* A subroutine of the atomic operation splitters. Emit a store-conditional
13749 instruction in MODE. */
13750
13751static void
13752emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
13753{
13754 rtx (*fn) (rtx, rtx, rtx) = NULL;
13755 if (mode == SImode)
13756 fn = gen_store_conditional_si;
13757 else if (mode == DImode)
13758 fn = gen_store_conditional_di;
13759
9f0076e5 13760 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
13761 if (PPC405_ERRATUM77)
13762 emit_insn (gen_memory_barrier ());
13763
13764 emit_insn (fn (res, mem, val));
13765}
13766
ea2c620c 13767/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 13768 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
13769 operand of the binary operator. BEFORE and AFTER are optional locations to
13770   return the value of MEM either before or after the operation.  SCRATCH is
13771 a scratch register. */
13772
13773void
13774rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
13775 rtx before, rtx after, rtx scratch)
13776{
13777 enum machine_mode mode = GET_MODE (mem);
13778 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13779
13780 emit_insn (gen_memory_barrier ());
13781
13782 label = gen_label_rtx ();
13783 emit_label (label);
13784 label = gen_rtx_LABEL_REF (VOIDmode, label);
13785
13786 if (before == NULL_RTX)
13787 before = scratch;
13788 emit_load_locked (mode, before, mem);
13789
13790 if (code == NOT)
13791 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
13792 else if (code == AND)
13793 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
13794 else
13795 x = gen_rtx_fmt_ee (code, mode, before, val);
13796
13797 if (after != NULL_RTX)
13798 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
13799 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
13800
13801 emit_store_conditional (mode, cond, mem, scratch);
13802
13803 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13804 emit_unlikely_jump (x, label);
13805
13806 emit_insn (gen_isync ());
13807}
13808
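/* Usage-level sketch: the splitter above is the kind of machinery behind
   the __sync_* fetch-and-op builtins on this target.  The emitted sequence
   is roughly

       sync
     1: lwarx   rT,0,rMEM		; load-reserved
        add     rS,rT,rVAL		; or another binary op
        stwcx.  rS,0,rMEM		; store-conditional
        bne-    1b			; reservation lost: retry
        isync

   so an atomic counter, for example, can be written simply as:  */

static int
sketch_atomic_increment (int *counter)
{
  return __sync_fetch_and_add (counter, 1);	/* returns the old value */
}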
b52110d4
DE
13809/* Expand an atomic compare and swap operation. MEM is the memory on which
13810 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
13811 value to be stored. SCRATCH is a scratch GPR. */
13812
13813void
13814rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
13815 rtx scratch)
13816{
13817 enum machine_mode mode = GET_MODE (mem);
13818 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13819
13820 emit_insn (gen_memory_barrier ());
13821
13822 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13823 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13824 emit_label (XEXP (label1, 0));
13825
13826 emit_load_locked (mode, retval, mem);
13827
13828 x = gen_rtx_COMPARE (CCmode, retval, oldval);
13829 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13830
13831 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13832 emit_unlikely_jump (x, label2);
13833
13834 emit_move_insn (scratch, newval);
13835 emit_store_conditional (mode, cond, mem, scratch);
13836
13837 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13838 emit_unlikely_jump (x, label1);
13839
13840 emit_insn (gen_isync ());
13841 emit_label (XEXP (label2, 0));
13842}
13843
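/* Usage-level sketch: the compare-and-swap splitter above loads the
   reserved word, bails out to label2 if it does not equal OLDVAL, and
   otherwise stores NEWVAL conditionally, retrying if the reservation was
   lost.  At the source level this is reached through builtins such as:  */

static int
sketch_compare_and_swap (int *p, int expected, int desired)
{
  /* Returns the prior contents of *P; equal to EXPECTED on success.  */
  return __sync_val_compare_and_swap (p, expected, desired);
}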
13844/* Expand an atomic test and set operation. MEM is the memory on which
13845 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
13846
13847void
13848rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
13849{
13850 enum machine_mode mode = GET_MODE (mem);
13851 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13852
13853 emit_insn (gen_memory_barrier ());
13854
13855 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13856 emit_label (XEXP (label, 0));
13857
13858 emit_load_locked (mode, retval, mem);
13859 emit_move_insn (scratch, val);
13860 emit_store_conditional (mode, cond, mem, scratch);
13861
13862 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13863 emit_unlikely_jump (x, label);
13864
13865 emit_insn (gen_isync ());
13866}
13867
9fc75b97
DE
13868void
13869rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
13870{
13871 enum machine_mode mode = GET_MODE (mem);
13872 rtx addrSI, align, wdst, shift, mask;
13873 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
13874 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13875
13876 /* Shift amount for subword relative to aligned word. */
13877 addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
13878 shift = gen_reg_rtx (SImode);
13879 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
13880 GEN_INT (shift_mask)));
13881 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
13882
13883 /* Shift and mask old value into position within word. */
13884 oldval = convert_modes (SImode, mode, oldval, 1);
13885 oldval = expand_binop (SImode, and_optab,
13886 oldval, GEN_INT (imask), NULL_RTX,
13887 1, OPTAB_LIB_WIDEN);
13888 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
13889
13890 /* Shift and mask new value into position within word. */
13891 newval = convert_modes (SImode, mode, newval, 1);
13892 newval = expand_binop (SImode, and_optab,
13893 newval, GEN_INT (imask), NULL_RTX,
13894 1, OPTAB_LIB_WIDEN);
13895 emit_insn (gen_ashlsi3 (newval, newval, shift));
13896
13897 /* Mask for insertion. */
13898 mask = gen_reg_rtx (SImode);
13899 emit_move_insn (mask, GEN_INT (imask));
13900 emit_insn (gen_ashlsi3 (mask, mask, shift));
13901
13902 /* Address of aligned word containing subword. */
13903 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
13904 NULL_RTX, 1, OPTAB_LIB_WIDEN);
13905 mem = change_address (mem, SImode, align);
13906 set_mem_align (mem, 32);
13907 MEM_VOLATILE_P (mem) = 1;
13908
13909 wdst = gen_reg_rtx (SImode);
13910 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
13911 oldval, newval, mem));
13912
2725b75c
JJ
13913 /* Shift the result back. */
13914 emit_insn (gen_lshrsi3 (wdst, wdst, shift));
13915
9fc75b97
DE
13916 emit_move_insn (dst, gen_lowpart (mode, wdst));
13917}
13918
13919void
13920rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
13921 rtx oldval, rtx newval, rtx mem,
13922 rtx scratch)
13923{
13924 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13925
13926 emit_insn (gen_memory_barrier ());
13927 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13928 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13929 emit_label (XEXP (label1, 0));
13930
13931 emit_load_locked (SImode, scratch, mem);
13932
13933 /* Mask subword within loaded value for comparison with oldval.
13934      Use UNSPEC_AND to avoid clobber.  */
13935 emit_insn (gen_rtx_SET (SImode, dest,
13936 gen_rtx_UNSPEC (SImode,
13937 gen_rtvec (2, scratch, mask),
13938 UNSPEC_AND)));
13939
13940 x = gen_rtx_COMPARE (CCmode, dest, oldval);
13941 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13942
13943 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13944 emit_unlikely_jump (x, label2);
13945
13946 /* Clear subword within loaded value for insertion of new value. */
13947 emit_insn (gen_rtx_SET (SImode, scratch,
13948 gen_rtx_AND (SImode,
13949 gen_rtx_NOT (SImode, mask), scratch)));
13950 emit_insn (gen_iorsi3 (scratch, scratch, newval));
13951 emit_store_conditional (SImode, cond, mem, scratch);
13952
13953 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13954 emit_unlikely_jump (x, label1);
13955
13956 emit_insn (gen_isync ());
13957 emit_label (XEXP (label2, 0));
13958}
13959
13960
b52110d4 13961 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
13962 multi-register moves. It will emit at most one instruction for
13963 each register that is accessed; that is, it won't emit li/lis pairs
13964 (or equivalent for 64-bit code). One of SRC or DST must be a hard
13965 register. */
46c07df8 13966
46c07df8 13967void
a9baceb1 13968rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 13969{
a9baceb1
GK
13970 /* The register number of the first register being moved. */
13971 int reg;
13972 /* The mode that is to be moved. */
13973 enum machine_mode mode;
13974 /* The mode that the move is being done in, and its size. */
13975 enum machine_mode reg_mode;
13976 int reg_mode_size;
13977 /* The number of registers that will be moved. */
13978 int nregs;
13979
13980 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
13981 mode = GET_MODE (dst);
c8b622ff 13982 nregs = hard_regno_nregs[reg][mode];
a9baceb1 13983 if (FP_REGNO_P (reg))
7393f7f8 13984 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13985 else if (ALTIVEC_REGNO_P (reg))
13986 reg_mode = V16QImode;
4d4447b5
PB
13987 else if (TARGET_E500_DOUBLE && (mode == TFmode || mode == TDmode))
13988 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13989 else
13990 reg_mode = word_mode;
13991 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 13992
37409796 13993 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 13994
a9baceb1
GK
13995 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
13996 {
13997 /* Move register range backwards, if we might have destructive
13998 overlap. */
13999 int i;
14000 for (i = nregs - 1; i >= 0; i--)
f676971a 14001 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
14002 simplify_gen_subreg (reg_mode, dst, mode,
14003 i * reg_mode_size),
14004 simplify_gen_subreg (reg_mode, src, mode,
14005 i * reg_mode_size)));
14006 }
46c07df8
HP
14007 else
14008 {
a9baceb1
GK
14009 int i;
14010 int j = -1;
14011 bool used_update = false;
46c07df8 14012
c1e55850 14013 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
14014 {
14015 rtx breg;
3a1f863f 14016
a9baceb1
GK
14017 if (GET_CODE (XEXP (src, 0)) == PRE_INC
14018 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
14019 {
14020 rtx delta_rtx;
a9baceb1 14021 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
14022 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
14023 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
14024 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
14025 emit_insn (TARGET_32BIT
14026 ? gen_addsi3 (breg, breg, delta_rtx)
14027 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14028 src = replace_equiv_address (src, breg);
3a1f863f 14029 }
d04b6e6e 14030 else if (! rs6000_offsettable_memref_p (src))
c1e55850 14031 {
13e2e16e 14032 rtx basereg;
c1e55850
GK
14033 basereg = gen_rtx_REG (Pmode, reg);
14034 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 14035 src = replace_equiv_address (src, basereg);
c1e55850 14036 }
3a1f863f 14037
0423421f
AM
14038 breg = XEXP (src, 0);
14039 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
14040 breg = XEXP (breg, 0);
14041
14042 /* If the base register we are using to address memory is
14043 also a destination reg, then change that register last. */
14044 if (REG_P (breg)
14045 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
14046 && REGNO (breg) < REGNO (dst) + nregs)
14047 j = REGNO (breg) - REGNO (dst);
c4ad648e 14048 }
46c07df8 14049
a9baceb1 14050 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
14051 {
14052 rtx breg;
14053
a9baceb1
GK
14054 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
14055 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
14056 {
14057 rtx delta_rtx;
a9baceb1 14058 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
14059 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
14060 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
14061 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
14062
14063 /* We have to update the breg before doing the store.
14064 Use store with update, if available. */
14065
14066 if (TARGET_UPDATE)
14067 {
a9baceb1 14068 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
14069 emit_insn (TARGET_32BIT
14070 ? (TARGET_POWERPC64
14071 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
14072 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
14073 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 14074 used_update = true;
3a1f863f
DE
14075 }
14076 else
a9baceb1
GK
14077 emit_insn (TARGET_32BIT
14078 ? gen_addsi3 (breg, breg, delta_rtx)
14079 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14080 dst = replace_equiv_address (dst, breg);
3a1f863f 14081 }
37409796 14082 else
d04b6e6e 14083 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
14084 }
14085
46c07df8 14086 for (i = 0; i < nregs; i++)
f676971a 14087 {
3a1f863f
DE
14088 /* Calculate index to next subword. */
14089 ++j;
f676971a 14090 if (j == nregs)
3a1f863f 14091 j = 0;
46c07df8 14092
112cdef5 14093	  /* If the compiler already emitted the move of the first word by
a9baceb1 14094	     store with update, there is no need to do anything.  */
3a1f863f 14095 if (j == 0 && used_update)
a9baceb1 14096 continue;
f676971a 14097
a9baceb1
GK
14098 emit_insn (gen_rtx_SET (VOIDmode,
14099 simplify_gen_subreg (reg_mode, dst, mode,
14100 j * reg_mode_size),
14101 simplify_gen_subreg (reg_mode, src, mode,
14102 j * reg_mode_size)));
3a1f863f 14103 }
46c07df8
HP
14104 }
14105}
14106
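/* A minimal sketch (not part of the port) of the overlap rule used above:
   when the source and destination register ranges overlap and the source
   starts at a lower register number, copying in ascending order would
   overwrite registers that are still to be read, so the words are moved
   in descending order instead -- the same direction choice memmove makes.  */

static void
sketch_move_words (unsigned int *dst, unsigned int *src, int nwords)
{
  int i;

  if (src < dst)		/* possible destructive overlap */
    for (i = nwords - 1; i >= 0; i--)
      dst[i] = src[i];
  else
    for (i = 0; i < nwords; i++)
      dst[i] = src[i];
}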
12a4e8c5 14107\f
a4f6c312
SS
14108/* This page contains routines that are used to determine what the
14109 function prologue and epilogue code will do and write them out. */
9878760c 14110
a4f6c312
SS
14111/* Return the first fixed-point register that is required to be
14112 saved. 32 if none. */
9878760c
RK
14113
14114int
863d938c 14115first_reg_to_save (void)
9878760c
RK
14116{
14117 int first_reg;
14118
14119 /* Find lowest numbered live register. */
14120 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 14121 if (df_regs_ever_live_p (first_reg)
a38d360d 14122 && (! call_used_regs[first_reg]
1db02437 14123 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14124 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
14125 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
14126 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
14127 break;
14128
ee890fe2 14129#if TARGET_MACHO
93638d7a
AM
14130 if (flag_pic
14131 && current_function_uses_pic_offset_table
14132 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 14133 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
14134#endif
14135
9878760c
RK
14136 return first_reg;
14137}
14138
14139/* Similar, for FP regs. */
14140
14141int
863d938c 14142first_fp_reg_to_save (void)
9878760c
RK
14143{
14144 int first_reg;
14145
14146 /* Find lowest numbered live register. */
14147 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 14148 if (df_regs_ever_live_p (first_reg))
9878760c
RK
14149 break;
14150
14151 return first_reg;
14152}
00b960c7
AH
14153
14154/* Similar, for AltiVec regs. */
14155
14156static int
863d938c 14157first_altivec_reg_to_save (void)
00b960c7
AH
14158{
14159 int i;
14160
14161 /* Stack frame remains as is unless we are in AltiVec ABI. */
14162 if (! TARGET_ALTIVEC_ABI)
14163 return LAST_ALTIVEC_REGNO + 1;
14164
22fa69da 14165 /* On Darwin, the unwind routines are compiled without
982afe02 14166 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
14167 altivec registers when necessary. */
14168 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
14169 && ! TARGET_ALTIVEC)
14170 return FIRST_ALTIVEC_REGNO + 20;
14171
00b960c7
AH
14172 /* Find lowest numbered live register. */
14173 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14174 if (df_regs_ever_live_p (i))
00b960c7
AH
14175 break;
14176
14177 return i;
14178}
14179
14180/* Return a 32-bit mask of the AltiVec registers we need to set in
14181   VRSAVE.  Bit n of the return value is 1 if Vn is live; the MSB of
14182   the 32-bit word corresponds to V0.  */
14183
14184static unsigned int
863d938c 14185compute_vrsave_mask (void)
00b960c7
AH
14186{
14187 unsigned int i, mask = 0;
14188
22fa69da 14189 /* On Darwin, the unwind routines are compiled without
982afe02 14190 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
14191 call-saved altivec registers when necessary. */
14192 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
14193 && ! TARGET_ALTIVEC)
14194 mask |= 0xFFF;
14195
00b960c7
AH
14196 /* First, find out if we use _any_ altivec registers. */
14197 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14198 if (df_regs_ever_live_p (i))
00b960c7
AH
14199 mask |= ALTIVEC_REG_BIT (i);
14200
14201 if (mask == 0)
14202 return mask;
14203
00b960c7
AH
14204 /* Next, remove the argument registers from the set. These must
14205 be in the VRSAVE mask set by the caller, so we don't need to add
14206 them in again. More importantly, the mask we compute here is
14207 used to generate CLOBBERs in the set_vrsave insn, and we do not
14208 wish the argument registers to die. */
38173d38 14209 for (i = crtl->args.info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
14210 mask &= ~ALTIVEC_REG_BIT (i);
14211
14212 /* Similarly, remove the return value from the set. */
14213 {
14214 bool yes = false;
14215 diddle_return_value (is_altivec_return_reg, &yes);
14216 if (yes)
14217 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
14218 }
14219
14220 return mask;
14221}
14222
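/* A minimal sketch (not part of the port) of the VRSAVE bit layout used
   above: bit 0 of the 32-bit word is its most significant bit and
   corresponds to V0, so marking Vn live sets 0x80000000 >> n.  */

static unsigned int
sketch_vrsave_mask (const unsigned char live[32])
{
  unsigned int i, mask = 0;

  for (i = 0; i < 32; i++)
    if (live[i])
      mask |= 0x80000000u >> i;
  return mask;
}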
d62294f5 14223/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
14224 size of prologues/epilogues by calling our own save/restore-the-world
14225 routines. */
d62294f5
FJ
14226
14227static void
f57fe068
AM
14228compute_save_world_info (rs6000_stack_t *info_ptr)
14229{
14230 info_ptr->world_save_p = 1;
14231 info_ptr->world_save_p
14232 = (WORLD_SAVE_P (info_ptr)
14233 && DEFAULT_ABI == ABI_DARWIN
14234 && ! (current_function_calls_setjmp && flag_exceptions)
14235 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
14236 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
14237 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
14238 && info_ptr->cr_save_p);
f676971a 14239
d62294f5
FJ
14240 /* This will not work in conjunction with sibcalls. Make sure there
14241 are none. (This check is expensive, but seldom executed.) */
f57fe068 14242 if (WORLD_SAVE_P (info_ptr))
f676971a 14243 {
d62294f5
FJ
14244 rtx insn;
14245 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
14246 if ( GET_CODE (insn) == CALL_INSN
14247 && SIBLING_CALL_P (insn))
14248 {
14249 info_ptr->world_save_p = 0;
14250 break;
14251 }
d62294f5 14252 }
f676971a 14253
f57fe068 14254 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
14255 {
14256 /* Even if we're not touching VRsave, make sure there's room on the
14257 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 14258 will attempt to save it. */
d62294f5
FJ
14259 info_ptr->vrsave_size = 4;
14260
298ac1dd
AP
14261 /* If we are going to save the world, we need to save the link register too. */
14262 info_ptr->lr_save_p = 1;
14263
d62294f5
FJ
14264 /* "Save" the VRsave register too if we're saving the world. */
14265 if (info_ptr->vrsave_mask == 0)
c4ad648e 14266 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
14267
14268 /* Because the Darwin register save/restore routines only handle
c4ad648e 14269 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 14270 check. */
37409796
NS
14271 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14272 && (info_ptr->first_altivec_reg_save
14273 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 14274 }
f676971a 14275 return;
d62294f5
FJ
14276}
14277
14278
00b960c7 14279static void
a2369ed3 14280is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
14281{
14282 bool *yes = (bool *) xyes;
14283 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14284 *yes = true;
14285}
14286
4697a36c
MM
14287\f
14288/* Calculate the stack information for the current function. This is
14289 complicated by having two separate calling sequences, the AIX calling
14290 sequence and the V.4 calling sequence.
14291
592696dd 14292 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 14293 32-bit 64-bit
4697a36c 14294 SP----> +---------------------------------------+
a260abc9 14295 | back chain to caller | 0 0
4697a36c 14296 +---------------------------------------+
a260abc9 14297 | saved CR | 4 8 (8-11)
4697a36c 14298 +---------------------------------------+
a260abc9 14299 | saved LR | 8 16
4697a36c 14300 +---------------------------------------+
a260abc9 14301 | reserved for compilers | 12 24
4697a36c 14302 +---------------------------------------+
a260abc9 14303 | reserved for binders | 16 32
4697a36c 14304 +---------------------------------------+
a260abc9 14305 | saved TOC pointer | 20 40
4697a36c 14306 +---------------------------------------+
a260abc9 14307 | Parameter save area (P) | 24 48
4697a36c 14308 +---------------------------------------+
a260abc9 14309 | Alloca space (A) | 24+P etc.
802a0058 14310 +---------------------------------------+
a7df97e6 14311 | Local variable space (L) | 24+P+A
4697a36c 14312 +---------------------------------------+
a7df97e6 14313 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 14314 +---------------------------------------+
00b960c7
AH
14315 | Save area for AltiVec registers (W) | 24+P+A+L+X
14316 +---------------------------------------+
14317 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
14318 +---------------------------------------+
14319 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 14320 +---------------------------------------+
00b960c7
AH
14321 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
14322 +---------------------------------------+
14323 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
4697a36c
MM
14324 +---------------------------------------+
14325 old SP->| back chain to caller's caller |
14326 +---------------------------------------+
14327
5376a30c
KR
14328 The required alignment for AIX configurations is two words (i.e., 8
14329 or 16 bytes).
14330
14331
4697a36c
MM
14332 V.4 stack frames look like:
14333
14334 SP----> +---------------------------------------+
14335 | back chain to caller | 0
14336 +---------------------------------------+
5eb387b8 14337 | caller's saved LR | 4
4697a36c
MM
14338 +---------------------------------------+
14339 | Parameter save area (P) | 8
14340 +---------------------------------------+
a7df97e6 14341 | Alloca space (A) | 8+P
f676971a 14342 +---------------------------------------+
a7df97e6 14343 | Varargs save area (V) | 8+P+A
f676971a 14344 +---------------------------------------+
a7df97e6 14345 | Local variable space (L) | 8+P+A+V
f676971a 14346 +---------------------------------------+
a7df97e6 14347 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 14348 +---------------------------------------+
00b960c7
AH
14349 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
14350 +---------------------------------------+
14351 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
14352 +---------------------------------------+
14353 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
14354 +---------------------------------------+
c4ad648e
AM
14355 | SPE: area for 64-bit GP registers |
14356 +---------------------------------------+
14357 | SPE alignment padding |
14358 +---------------------------------------+
00b960c7 14359 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 14360 +---------------------------------------+
00b960c7 14361 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 14362 +---------------------------------------+
00b960c7 14363 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
14364 +---------------------------------------+
14365 old SP->| back chain to caller's caller |
14366 +---------------------------------------+
b6c9286a 14367
5376a30c
KR
14368 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
14369 given. (But note below and in sysv4.h that we require only 8 and
14370 may round up the size of our stack frame anyways. The historical
14371 reason is early versions of powerpc-linux which didn't properly
14372 align the stack at program startup. A happy side-effect is that
14373 -mno-eabi libraries can be used with -meabi programs.)
14374
50d440bc 14375 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
14376 the stack alignment requirements may differ. If -mno-eabi is not
14377 given, the required stack alignment is 8 bytes; if -mno-eabi is
14378 given, the required alignment is 16 bytes. (But see V.4 comment
14379 above.) */
4697a36c 14380
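/* Worked example (illustrative only) of the 32-bit AIX offsets in the
   diagram above: the fixed header is 24 bytes, so with a P-byte parameter
   save area and A bytes of alloca space the local variable space starts
   at 24 + P + A from the stack pointer, e.g. 24 + 32 + 0 = 56 for a
   32-byte parameter area and no alloca.  */

static int
sketch_aix32_locals_offset (int param_save_bytes, int alloca_bytes)
{
  return 24 + param_save_bytes + alloca_bytes;
}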
61b2fbe7
MM
14381#ifndef ABI_STACK_BOUNDARY
14382#define ABI_STACK_BOUNDARY STACK_BOUNDARY
14383#endif
14384
d1d0c603 14385static rs6000_stack_t *
863d938c 14386rs6000_stack_info (void)
4697a36c 14387{
022123e6 14388 static rs6000_stack_t info;
4697a36c 14389 rs6000_stack_t *info_ptr = &info;
327e5343 14390 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 14391 int ehrd_size;
64045029 14392 int save_align;
8070c91a 14393 int first_gp;
44688022 14394 HOST_WIDE_INT non_fixed_size;
4697a36c 14395
022123e6 14396 memset (&info, 0, sizeof (info));
4697a36c 14397
c19de7aa
AH
14398 if (TARGET_SPE)
14399 {
14400 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 14401 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
14402 cfun->machine->insn_chain_scanned_p
14403 = spe_func_has_64bit_regs_p () + 1;
14404 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
14405 }
14406
a4f6c312 14407 /* Select which calling sequence. */
178274da 14408 info_ptr->abi = DEFAULT_ABI;
9878760c 14409
a4f6c312 14410 /* Calculate which registers need to be saved & save area size. */
4697a36c 14411 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 14412 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
14413 even if it currently looks like we won't. Reload may need it to
14414 get at a constant; if so, it will have already created a constant
14415 pool entry for it. */
2bfcf297 14416 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
14417 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14418 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
8070c91a 14419 && current_function_uses_const_pool
1db02437 14420 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 14421 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 14422 else
8070c91a
DJ
14423 first_gp = info_ptr->first_gp_reg_save;
14424
14425 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 14426
a3170dc6
AH
14427 /* For the SPE, we have an additional upper 32-bits on each GPR.
14428 Ideally we should save the entire 64-bits only when the upper
14429 half is used in SIMD instructions. Since we only record
14430 registers live (not the size they are used in), this proves
14431 difficult because we'd have to traverse the instruction chain at
14432 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
14433 so we opt to save the GPRs in 64-bits always if but one register
14434 gets used in 64-bits. Otherwise, all the registers in the frame
14435 get saved in 32-bits.
a3170dc6 14436
c19de7aa 14437 So... since when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 14438 traditional GP save area will be empty. */
c19de7aa 14439 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14440 info_ptr->gp_size = 0;
14441
4697a36c
MM
14442 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14443 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14444
00b960c7
AH
14445 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14446 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14447 - info_ptr->first_altivec_reg_save);
14448
592696dd 14449 /* Does this function call anything? */
71f123ca
FS
14450 info_ptr->calls_p = (! current_function_is_leaf
14451 || cfun->machine->ra_needs_full_frame);
b6c9286a 14452
a4f6c312 14453 /* Determine if we need to save the link register. */
022123e6
AM
14454 if ((DEFAULT_ABI == ABI_AIX
14455 && current_function_profile
14456 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
14457#ifdef TARGET_RELOCATABLE
14458 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14459#endif
14460 || (info_ptr->first_fp_reg_save != 64
14461 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
178274da 14462 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
022123e6
AM
14463 || info_ptr->calls_p
14464 || rs6000_ra_ever_killed ())
4697a36c
MM
14465 {
14466 info_ptr->lr_save_p = 1;
1de43f85 14467 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
14468 }
14469
9ebbca7d 14470 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
14471 if (df_regs_ever_live_p (CR2_REGNO)
14472 || df_regs_ever_live_p (CR3_REGNO)
14473 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
14474 {
14475 info_ptr->cr_save_p = 1;
178274da 14476 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
14477 info_ptr->cr_size = reg_size;
14478 }
14479
83720594
RH
14480 /* If the current function calls __builtin_eh_return, then we need
14481 to allocate stack space for registers that will hold data for
14482 the exception handler. */
14483 if (current_function_calls_eh_return)
14484 {
14485 unsigned int i;
14486 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14487 continue;
a3170dc6
AH
14488
14489 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14490 ehrd_size = i * (TARGET_SPE_ABI
14491 && info_ptr->spe_64bit_regs_used != 0
14492 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14493 }
14494 else
14495 ehrd_size = 0;
14496
592696dd 14497 /* Determine various sizes. */
4697a36c
MM
14498 info_ptr->reg_size = reg_size;
14499 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14500 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
38173d38 14501 info_ptr->parm_size = RS6000_ALIGN (crtl->outgoing_args_size,
03e007d7 14502 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14503 if (FRAME_GROWS_DOWNWARD)
14504 info_ptr->vars_size
5b667039
JJ
14505 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14506 + info_ptr->parm_size,
7d5175e1 14507 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14508 - (info_ptr->fixed_size + info_ptr->vars_size
14509 + info_ptr->parm_size);
00b960c7 14510
c19de7aa 14511 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 14512 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
14513 else
14514 info_ptr->spe_gp_size = 0;
14515
4d774ff8
HP
14516 if (TARGET_ALTIVEC_ABI)
14517 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14518 else
4d774ff8
HP
14519 info_ptr->vrsave_mask = 0;
14520
14521 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14522 info_ptr->vrsave_size = 4;
14523 else
14524 info_ptr->vrsave_size = 0;
b6c9286a 14525
d62294f5
FJ
14526 compute_save_world_info (info_ptr);
14527
592696dd 14528 /* Calculate the offsets. */
178274da 14529 switch (DEFAULT_ABI)
4697a36c 14530 {
b6c9286a 14531 case ABI_NONE:
24d304eb 14532 default:
37409796 14533 gcc_unreachable ();
b6c9286a
MM
14534
14535 case ABI_AIX:
ee890fe2 14536 case ABI_DARWIN:
b6c9286a
MM
14537 info_ptr->fp_save_offset = - info_ptr->fp_size;
14538 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14539
14540 if (TARGET_ALTIVEC_ABI)
14541 {
14542 info_ptr->vrsave_save_offset
14543 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14544
982afe02 14545 /* Align stack so vector save area is on a quadword boundary.
9278121c 14546 The padding goes above the vectors. */
00b960c7
AH
14547 if (info_ptr->altivec_size != 0)
14548 info_ptr->altivec_padding_size
9278121c 14549 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14550 else
14551 info_ptr->altivec_padding_size = 0;
14552
14553 info_ptr->altivec_save_offset
14554 = info_ptr->vrsave_save_offset
14555 - info_ptr->altivec_padding_size
14556 - info_ptr->altivec_size;
9278121c
GK
14557 gcc_assert (info_ptr->altivec_size == 0
14558 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
14559
14560 /* Adjust for AltiVec case. */
14561 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14562 }
14563 else
14564 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14565 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14566 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
14567 break;
14568
14569 case ABI_V4:
b6c9286a
MM
14570 info_ptr->fp_save_offset = - info_ptr->fp_size;
14571 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14572 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14573
c19de7aa 14574 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14575 {
14576 /* Align stack so SPE GPR save area is aligned on a
14577 double-word boundary. */
14578 if (info_ptr->spe_gp_size != 0)
14579 info_ptr->spe_padding_size
14580 = 8 - (-info_ptr->cr_save_offset % 8);
14581 else
14582 info_ptr->spe_padding_size = 0;
14583
14584 info_ptr->spe_gp_save_offset
14585 = info_ptr->cr_save_offset
14586 - info_ptr->spe_padding_size
14587 - info_ptr->spe_gp_size;
14588
14589 /* Adjust for SPE case. */
022123e6 14590 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14591 }
a3170dc6 14592 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14593 {
14594 info_ptr->vrsave_save_offset
14595 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14596
14597 /* Align stack so vector save area is on a quadword boundary. */
14598 if (info_ptr->altivec_size != 0)
14599 info_ptr->altivec_padding_size
14600 = 16 - (-info_ptr->vrsave_save_offset % 16);
14601 else
14602 info_ptr->altivec_padding_size = 0;
14603
14604 info_ptr->altivec_save_offset
14605 = info_ptr->vrsave_save_offset
14606 - info_ptr->altivec_padding_size
14607 - info_ptr->altivec_size;
14608
14609 /* Adjust for AltiVec case. */
022123e6 14610 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14611 }
14612 else
022123e6
AM
14613 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14614 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14615 info_ptr->lr_save_offset = reg_size;
14616 break;
4697a36c
MM
14617 }
14618
64045029 14619 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14620 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14621 + info_ptr->gp_size
14622 + info_ptr->altivec_size
14623 + info_ptr->altivec_padding_size
a3170dc6
AH
14624 + info_ptr->spe_gp_size
14625 + info_ptr->spe_padding_size
00b960c7
AH
14626 + ehrd_size
14627 + info_ptr->cr_size
022123e6 14628 + info_ptr->vrsave_size,
64045029 14629 save_align);
00b960c7 14630
44688022 14631 non_fixed_size = (info_ptr->vars_size
ff381587 14632 + info_ptr->parm_size
5b667039 14633 + info_ptr->save_size);
ff381587 14634
44688022
AM
14635 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14636 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14637
14638 /* Determine if we need to allocate any stack frame:
14639
a4f6c312
SS
14640 For AIX we need to push the stack if a frame pointer is needed
14641 (because the stack might be dynamically adjusted), if we are
14642 debugging, if we make calls, or if the sum of fp_save, gp_save,
14643      and local variables is more than the space needed to save all
14644 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14645 + 18*8 = 288 (GPR13 reserved).
ff381587 14646
a4f6c312
SS
14647 For V.4 we don't have the stack cushion that AIX uses, but assume
14648 that the debugger can handle stackless frames. */
ff381587
MM
14649
14650 if (info_ptr->calls_p)
14651 info_ptr->push_p = 1;
14652
178274da 14653 else if (DEFAULT_ABI == ABI_V4)
44688022 14654 info_ptr->push_p = non_fixed_size != 0;
ff381587 14655
178274da
AM
14656 else if (frame_pointer_needed)
14657 info_ptr->push_p = 1;
14658
14659 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14660 info_ptr->push_p = 1;
14661
ff381587 14662 else
44688022 14663 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 14664
a4f6c312 14665 /* Zero offsets if we're not saving those registers. */
8dda1a21 14666 if (info_ptr->fp_size == 0)
4697a36c
MM
14667 info_ptr->fp_save_offset = 0;
14668
8dda1a21 14669 if (info_ptr->gp_size == 0)
4697a36c
MM
14670 info_ptr->gp_save_offset = 0;
14671
00b960c7
AH
14672 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14673 info_ptr->altivec_save_offset = 0;
14674
14675 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14676 info_ptr->vrsave_save_offset = 0;
14677
c19de7aa
AH
14678 if (! TARGET_SPE_ABI
14679 || info_ptr->spe_64bit_regs_used == 0
14680 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
14681 info_ptr->spe_gp_save_offset = 0;
14682
c81fc13e 14683 if (! info_ptr->lr_save_p)
4697a36c
MM
14684 info_ptr->lr_save_offset = 0;
14685
c81fc13e 14686 if (! info_ptr->cr_save_p)
4697a36c
MM
14687 info_ptr->cr_save_offset = 0;
14688
14689 return info_ptr;
14690}
14691
c19de7aa
AH
14692/* Return true if the current function uses any GPRs in 64-bit SIMD
14693 mode. */
14694
14695static bool
863d938c 14696spe_func_has_64bit_regs_p (void)
c19de7aa
AH
14697{
14698 rtx insns, insn;
14699
14700 /* Functions that save and restore all the call-saved registers will
14701 need to save/restore the registers in 64-bits. */
14702 if (current_function_calls_eh_return
14703 || current_function_calls_setjmp
14704 || current_function_has_nonlocal_goto)
14705 return true;
14706
14707 insns = get_insns ();
14708
14709 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
14710 {
14711 if (INSN_P (insn))
14712 {
14713 rtx i;
14714
b5a5beb9
AH
14715 /* FIXME: This should be implemented with attributes...
14716
14717 (set_attr "spe64" "true")....then,
14718 if (get_spe64(insn)) return true;
14719
14720 It's the only reliable way to do the stuff below. */
14721
c19de7aa 14722 i = PATTERN (insn);
f82f556d
AH
14723 if (GET_CODE (i) == SET)
14724 {
14725 enum machine_mode mode = GET_MODE (SET_SRC (i));
14726
14727 if (SPE_VECTOR_MODE (mode))
14728 return true;
4d4447b5
PB
14729 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
14730 || mode == DDmode || mode == TDmode))
f82f556d
AH
14731 return true;
14732 }
c19de7aa
AH
14733 }
14734 }
14735
14736 return false;
14737}
14738
d1d0c603 14739static void
a2369ed3 14740debug_stack_info (rs6000_stack_t *info)
9878760c 14741{
d330fd93 14742 const char *abi_string;
24d304eb 14743
c81fc13e 14744 if (! info)
4697a36c
MM
14745 info = rs6000_stack_info ();
14746
14747 fprintf (stderr, "\nStack information for function %s:\n",
14748 ((current_function_decl && DECL_NAME (current_function_decl))
14749 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
14750 : "<unknown>"));
14751
24d304eb
RK
14752 switch (info->abi)
14753 {
b6c9286a
MM
14754 default: abi_string = "Unknown"; break;
14755 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 14756 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 14757 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 14758 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
14759 }
14760
14761 fprintf (stderr, "\tABI = %5s\n", abi_string);
14762
00b960c7
AH
14763 if (TARGET_ALTIVEC_ABI)
14764 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
14765
a3170dc6
AH
14766 if (TARGET_SPE_ABI)
14767 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
14768
4697a36c
MM
14769 if (info->first_gp_reg_save != 32)
14770 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
14771
14772 if (info->first_fp_reg_save != 64)
14773 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 14774
00b960c7
AH
14775 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
14776 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
14777 info->first_altivec_reg_save);
14778
4697a36c
MM
14779 if (info->lr_save_p)
14780 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 14781
4697a36c
MM
14782 if (info->cr_save_p)
14783 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
14784
00b960c7
AH
14785 if (info->vrsave_mask)
14786 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
14787
4697a36c
MM
14788 if (info->push_p)
14789 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
14790
14791 if (info->calls_p)
14792 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
14793
4697a36c
MM
14794 if (info->gp_save_offset)
14795 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
14796
14797 if (info->fp_save_offset)
14798 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
14799
00b960c7
AH
14800 if (info->altivec_save_offset)
14801 fprintf (stderr, "\taltivec_save_offset = %5d\n",
14802 info->altivec_save_offset);
14803
a3170dc6
AH
14804 if (info->spe_gp_save_offset)
14805 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
14806 info->spe_gp_save_offset);
14807
00b960c7
AH
14808 if (info->vrsave_save_offset)
14809 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
14810 info->vrsave_save_offset);
14811
4697a36c
MM
14812 if (info->lr_save_offset)
14813 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
14814
14815 if (info->cr_save_offset)
14816 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
14817
14818 if (info->varargs_save_offset)
14819 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
14820
14821 if (info->total_size)
d1d0c603
JJ
14822 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14823 info->total_size);
4697a36c 14824
4697a36c 14825 if (info->vars_size)
d1d0c603
JJ
14826 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14827 info->vars_size);
4697a36c
MM
14828
14829 if (info->parm_size)
14830 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
14831
14832 if (info->fixed_size)
14833 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
14834
14835 if (info->gp_size)
14836 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
14837
a3170dc6
AH
14838 if (info->spe_gp_size)
14839 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
14840
4697a36c
MM
14841 if (info->fp_size)
14842 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
14843
00b960c7
AH
14844 if (info->altivec_size)
14845 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
14846
14847 if (info->vrsave_size)
14848 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
14849
14850 if (info->altivec_padding_size)
14851 fprintf (stderr, "\taltivec_padding_size= %5d\n",
14852 info->altivec_padding_size);
14853
a3170dc6
AH
14854 if (info->spe_padding_size)
14855 fprintf (stderr, "\tspe_padding_size = %5d\n",
14856 info->spe_padding_size);
14857
4697a36c
MM
14858 if (info->cr_size)
14859 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
14860
14861 if (info->save_size)
14862 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
14863
14864 if (info->reg_size != 4)
14865 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
14866
14867 fprintf (stderr, "\n");
9878760c 14868}
71f123ca
FS
14869
14870rtx
a2369ed3 14871rs6000_return_addr (int count, rtx frame)
71f123ca 14872{
a4f6c312
SS
14873 /* Currently we don't optimize very well between prolog and body
14874     code, and for PIC code the code can actually be quite bad, so
14875 don't try to be too clever here. */
f1384257 14876 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
14877 {
14878 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
14879
14880 return
14881 gen_rtx_MEM
14882 (Pmode,
14883 memory_address
14884 (Pmode,
14885 plus_constant (copy_to_reg
14886 (gen_rtx_MEM (Pmode,
14887 memory_address (Pmode, frame))),
14888 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
14889 }
14890
8c29550d 14891 cfun->machine->ra_need_lr = 1;
1de43f85 14892 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
14893}
14894
5e1bf043
DJ
14895/* Say whether a function is a candidate for sibcall handling or not.
14896 We do not allow indirect calls to be optimized into sibling calls.
14897   Also, we can't do it if there are any vector parameters; there's
14898   nowhere to put the VRsave code so that it works.  Note that functions with
14899 vector parameters are required to have a prototype, so the argument
14900 type info must be available here. (The tail recursion case can work
14901 with vector parameters, but there's no way to distinguish here.) */
4977bab6 14902static bool
a2369ed3 14903rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
14904{
14905 tree type;
4977bab6 14906 if (decl)
5e1bf043
DJ
14907 {
14908 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 14909 {
4977bab6 14910 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
14911 type; type = TREE_CHAIN (type))
14912 {
c15b529f 14913 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 14914 return false;
5e1bf043 14915 }
c4ad648e 14916 }
5e1bf043 14917 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
14918 || ((*targetm.binds_local_p) (decl)
14919 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 14920 {
4977bab6 14921 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
14922
14923 if (!lookup_attribute ("longcall", attr_list)
14924 || lookup_attribute ("shortcall", attr_list))
4977bab6 14925 return true;
2bcc50d0 14926 }
5e1bf043 14927 }
4977bab6 14928 return false;
5e1bf043
DJ
14929}
14930
e7e64a25
AS
14931/* Return NULL if INSN is valid within a low-overhead loop.
14932   Otherwise return the reason why doloop cannot be applied.
9419649c
DE
14933 PowerPC uses the COUNT register for branch on table instructions. */
14934
e7e64a25 14935static const char *
3101faab 14936rs6000_invalid_within_doloop (const_rtx insn)
9419649c
DE
14937{
14938 if (CALL_P (insn))
e7e64a25 14939 return "Function call in the loop.";
9419649c
DE
14940
14941 if (JUMP_P (insn)
14942 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
14943 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 14944 return "Computed branch in the loop.";
9419649c 14945
e7e64a25 14946 return NULL;
9419649c
DE
14947}
14948
71f123ca 14949static int
863d938c 14950rs6000_ra_ever_killed (void)
71f123ca
FS
14951{
14952 rtx top;
5e1bf043
DJ
14953 rtx reg;
14954 rtx insn;
71f123ca 14955
dd292d0a 14956 if (current_function_is_thunk)
71f123ca 14957 return 0;
eb0424da 14958
36f7e964
AH
14959 /* regs_ever_live has LR marked as used if any sibcalls are present,
14960 but this should not force saving and restoring in the
14961 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 14962 clobbers LR, so that is inappropriate. */
36f7e964 14963
5e1bf043
DJ
14964 /* Also, the prologue can generate a store into LR that
14965 doesn't really count, like this:
36f7e964 14966
5e1bf043
DJ
14967 move LR->R0
14968 bcl to set PIC register
14969 move LR->R31
14970 move R0->LR
36f7e964
AH
14971
14972 When we're called from the epilogue, we need to avoid counting
14973 this as a store. */
f676971a 14974
71f123ca
FS
14975 push_topmost_sequence ();
14976 top = get_insns ();
14977 pop_topmost_sequence ();
1de43f85 14978 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 14979
5e1bf043
DJ
14980 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
14981 {
14982 if (INSN_P (insn))
14983 {
022123e6
AM
14984 if (CALL_P (insn))
14985 {
14986 if (!SIBLING_CALL_P (insn))
14987 return 1;
14988 }
1de43f85 14989 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 14990 return 1;
36f7e964
AH
14991 else if (set_of (reg, insn) != NULL_RTX
14992 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
14993 return 1;
14994 }
14995 }
14996 return 0;
71f123ca 14997}
4697a36c 14998\f
9ebbca7d 14999/* Emit instructions needed to load the TOC register.
c7ca610e 15000   This is only needed when TARGET_TOC and TARGET_MINIMAL_TOC are set and
9ebbca7d 15001   there is a constant pool, or for SVR4 -fpic.  */
c7ca610e
RK
15002
15003void
a2369ed3 15004rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 15005{
6fb5fa3c 15006 rtx dest;
1db02437 15007 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 15008
7f970b70 15009 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 15010 {
7f970b70 15011 char buf[30];
e65a3857 15012 rtx lab, tmp1, tmp2, got;
7f970b70
AM
15013
15014 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15015 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15016 if (flag_pic == 2)
15017 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
15018 else
15019 got = rs6000_got_sym ();
15020 tmp1 = tmp2 = dest;
15021 if (!fromprolog)
15022 {
15023 tmp1 = gen_reg_rtx (Pmode);
15024 tmp2 = gen_reg_rtx (Pmode);
15025 }
6fb5fa3c
DB
15026 emit_insn (gen_load_toc_v4_PIC_1 (lab));
15027 emit_move_insn (tmp1,
1de43f85 15028 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
15029 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
15030 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
15031 }
15032 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
15033 {
6fb5fa3c 15034 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 15035 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
15036 }
15037 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
15038 {
15039 char buf[30];
20b71b17
AM
15040 rtx temp0 = (fromprolog
15041 ? gen_rtx_REG (Pmode, 0)
15042 : gen_reg_rtx (Pmode));
20b71b17 15043
20b71b17
AM
15044 if (fromprolog)
15045 {
ccbca5e4 15046 rtx symF, symL;
38c1f2d7 15047
20b71b17
AM
15048 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15049 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 15050
20b71b17
AM
15051 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
15052 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15053
6fb5fa3c
DB
15054 emit_insn (gen_load_toc_v4_PIC_1 (symF));
15055 emit_move_insn (dest,
1de43f85 15056 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 15057 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
15058 }
15059 else
20b71b17
AM
15060 {
15061 rtx tocsym;
20b71b17
AM
15062
15063 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
15064 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
15065 emit_move_insn (dest,
1de43f85 15066 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 15067 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 15068 }
6fb5fa3c 15069 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 15070 }
20b71b17
AM
15071 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
15072 {
15073 /* This is for AIX code running in non-PIC ELF32. */
15074 char buf[30];
15075 rtx realsym;
15076 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15077 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15078
6fb5fa3c
DB
15079 emit_insn (gen_elf_high (dest, realsym));
15080 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 15081 }
37409796 15082 else
9ebbca7d 15083 {
37409796 15084 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 15085
9ebbca7d 15086 if (TARGET_32BIT)
6fb5fa3c 15087 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 15088 else
6fb5fa3c 15089 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
15090 }
15091}
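/* For the TARGET_SECURE_PLT case at the top of this function the emitted
   code is roughly:

       bcl 20,31,.LCFn
   .LCFn:
       mflr  rTMP1
       addis rTMP2,rTMP1,(GOT-.LCFn)@ha
       addi  rDEST,rTMP2,(GOT-.LCFn)@l

   i.e. a branch-and-link to the immediately following label puts the
   current address in LR, and the @ha/@l pair then applies the
   assemble-time distance from that label to the GOT/TOC symbol.
   (Register names above are placeholders for the temporaries chosen by
   the code above.)  */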
15092
d1d0c603
JJ
15093/* Emit instructions to restore the link register after determining where
15094 its value has been stored. */
15095
15096void
15097rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
15098{
15099 rs6000_stack_t *info = rs6000_stack_info ();
15100 rtx operands[2];
15101
15102 operands[0] = source;
15103 operands[1] = scratch;
15104
15105 if (info->lr_save_p)
15106 {
15107 rtx frame_rtx = stack_pointer_rtx;
15108 HOST_WIDE_INT sp_offset = 0;
15109 rtx tmp;
15110
15111 if (frame_pointer_needed
15112 || current_function_calls_alloca
15113 || info->total_size > 32767)
15114 {
0be76840 15115 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 15116 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
15117 frame_rtx = operands[1];
15118 }
15119 else if (info->push_p)
15120 sp_offset = info->total_size;
15121
15122 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 15123 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
15124 emit_move_insn (tmp, operands[0]);
15125 }
15126 else
1de43f85 15127 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
15128}
15129
4862826d 15130static GTY(()) alias_set_type set = -1;
f103e34d 15131
4862826d 15132alias_set_type
863d938c 15133get_TOC_alias_set (void)
9ebbca7d 15134{
f103e34d
GK
15135 if (set == -1)
15136 set = new_alias_set ();
15137 return set;
f676971a 15138}
9ebbca7d 15139
c1207243 15140/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
15141 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
15142 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 15143#if TARGET_ELF
3c9eb5f4 15144static int
f676971a 15145uses_TOC (void)
9ebbca7d 15146{
c4501e62 15147 rtx insn;
38c1f2d7 15148
c4501e62
JJ
15149 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
15150 if (INSN_P (insn))
15151 {
15152 rtx pat = PATTERN (insn);
15153 int i;
9ebbca7d 15154
f676971a 15155 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
15156 for (i = 0; i < XVECLEN (pat, 0); i++)
15157 {
15158 rtx sub = XVECEXP (pat, 0, i);
15159 if (GET_CODE (sub) == USE)
15160 {
15161 sub = XEXP (sub, 0);
15162 if (GET_CODE (sub) == UNSPEC
15163 && XINT (sub, 1) == UNSPEC_TOC)
15164 return 1;
15165 }
15166 }
15167 }
15168 return 0;
9ebbca7d 15169}
c954844a 15170#endif
38c1f2d7 15171
9ebbca7d 15172rtx
f676971a 15173create_TOC_reference (rtx symbol)
9ebbca7d 15174{
b3a13419 15175 if (!can_create_pseudo_p ())
6fb5fa3c 15176 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 15177 return gen_rtx_PLUS (Pmode,
a8a05998 15178 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a
EC
15179 gen_rtx_CONST (Pmode,
15180 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 15181 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 15182}
38c1f2d7 15183
fc4767bb
JJ
15184/* If _Unwind_* has been called from within the same module,
15185   the TOC register is not guaranteed to be saved to 40(1) on function
15186 entry. Save it there in that case. */
c7ca610e 15187
9ebbca7d 15188void
863d938c 15189rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
15190{
15191 rtx mem;
15192 rtx stack_top = gen_reg_rtx (Pmode);
15193 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
15194 rtx opcode = gen_reg_rtx (SImode);
15195 rtx tocompare = gen_reg_rtx (SImode);
15196 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 15197
8308679f 15198 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
15199 emit_move_insn (stack_top, mem);
15200
8308679f
DE
15201 mem = gen_frame_mem (Pmode,
15202 gen_rtx_PLUS (Pmode, stack_top,
15203 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 15204 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
15205 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
15206 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 15207 : 0xE8410028, SImode));
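  /* 0x80410014 is the encoding of "lwz r2,20(r1)" and 0xE8410028 that of
     "ld r2,40(r1)": the TOC-restore instruction a cross-module call site
     places after the branch.  If the word at the return address is that
     reload, the caller will restore r2 itself and no save is needed.  */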
9ebbca7d 15208
fc4767bb 15209 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 15210 SImode, NULL_RTX, NULL_RTX,
fc4767bb 15211 no_toc_save_needed);
9ebbca7d 15212
8308679f
DE
15213 mem = gen_frame_mem (Pmode,
15214 gen_rtx_PLUS (Pmode, stack_top,
15215 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
15216 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
15217 emit_label (no_toc_save_needed);
9ebbca7d 15218}
38c1f2d7 15219\f
0be76840
DE
15220/* This ties together stack memory (MEM with an alias set of frame_alias_set)
15221 and the change to the stack pointer. */
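/* Tying the two keeps later passes, in particular the scheduler, from
   moving frame loads and stores across the stack pointer update.  */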
ba4828e0 15222
9ebbca7d 15223static void
863d938c 15224rs6000_emit_stack_tie (void)
9ebbca7d 15225{
0be76840
DE
15226 rtx mem = gen_frame_mem (BLKmode,
15227 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 15228
9ebbca7d
GK
15229 emit_insn (gen_stack_tie (mem));
15230}
38c1f2d7 15231
9ebbca7d
GK
15232/* Emit the correct code for allocating stack space, as insns.
15233 If COPY_R12, make sure a copy of the old frame is left in r12.
15234 The generated code may use hard register 0 as a temporary. */
15235
15236static void
a2369ed3 15237rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
38c1f2d7 15238{
9ebbca7d
GK
15239 rtx insn;
15240 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15241 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
15242 rtx todec = gen_int_mode (-size, Pmode);
15243
15244 if (INTVAL (todec) != -size)
15245 {
d4ee4d25 15246 warning (0, "stack frame too large");
61168ff1
RS
15247 emit_insn (gen_trap ());
15248 return;
15249 }
a157febd
GK
15250
15251 if (current_function_limit_stack)
15252 {
15253 if (REG_P (stack_limit_rtx)
f676971a 15254 && REGNO (stack_limit_rtx) > 1
a157febd
GK
15255 && REGNO (stack_limit_rtx) <= 31)
15256 {
5b71a4e7 15257 emit_insn (TARGET_32BIT
9ebbca7d
GK
15258 ? gen_addsi3 (tmp_reg,
15259 stack_limit_rtx,
15260 GEN_INT (size))
15261 : gen_adddi3 (tmp_reg,
15262 stack_limit_rtx,
15263 GEN_INT (size)));
5b71a4e7 15264
9ebbca7d
GK
15265 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15266 const0_rtx));
a157febd
GK
15267 }
15268 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 15269 && TARGET_32BIT
f607bc57 15270 && DEFAULT_ABI == ABI_V4)
a157febd 15271 {
9ebbca7d 15272 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
15273 gen_rtx_PLUS (Pmode,
15274 stack_limit_rtx,
9ebbca7d 15275 GEN_INT (size)));
5b71a4e7 15276
9ebbca7d
GK
15277 emit_insn (gen_elf_high (tmp_reg, toload));
15278 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
15279 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15280 const0_rtx));
a157febd
GK
15281 }
15282 else
d4ee4d25 15283 warning (0, "stack limit expression is not supported");
a157febd
GK
15284 }
15285
9ebbca7d
GK
15286 if (copy_r12 || ! TARGET_UPDATE)
15287 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
15288
38c1f2d7
MM
15289 if (TARGET_UPDATE)
15290 {
9ebbca7d 15291 if (size > 32767)
38c1f2d7 15292 {
9ebbca7d 15293 /* Need a note here so that try_split doesn't get confused. */
9390387d 15294 if (get_last_insn () == NULL_RTX)
2e040219 15295 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
15296 insn = emit_move_insn (tmp_reg, todec);
15297 try_split (PATTERN (insn), insn, 0);
15298 todec = tmp_reg;
38c1f2d7 15299 }
5b71a4e7
DE
15300
15301 insn = emit_insn (TARGET_32BIT
15302 ? gen_movsi_update (stack_reg, stack_reg,
15303 todec, stack_reg)
c4ad648e 15304 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 15305 todec, stack_reg));
38c1f2d7
MM
15306 }
15307 else
15308 {
5b71a4e7
DE
15309 insn = emit_insn (TARGET_32BIT
15310 ? gen_addsi3 (stack_reg, stack_reg, todec)
15311 : gen_adddi3 (stack_reg, stack_reg, todec));
9ebbca7d
GK
15312 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
15313 gen_rtx_REG (Pmode, 12));
15314 }
f676971a 15315
9ebbca7d 15316 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 15317 REG_NOTES (insn) =
9ebbca7d 15318 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 15319 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
15320 gen_rtx_PLUS (Pmode, stack_reg,
15321 GEN_INT (-size))),
15322 REG_NOTES (insn));
15323}
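/* With TARGET_UPDATE and a frame size that fits in a 16-bit signed
   immediate, the sequence above typically reduces to a single
   "stwu r1,-SIZE(r1)" (or "stdu" on 64-bit), which allocates the frame
   and stores the back chain in one instruction.  */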
15324
a4f6c312
SS
15325/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
15326 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
15327 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
15328 deduce these equivalences by itself so it wasn't necessary to hold
15329 its hand so much. */
9ebbca7d
GK
15330
15331static void
f676971a 15332rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 15333 rtx reg2, rtx rreg)
9ebbca7d
GK
15334{
15335 rtx real, temp;
15336
e56c4463
JL
15337 /* copy_rtx will not make unique copies of registers, so we need to
15338 ensure we don't have unwanted sharing here. */
15339 if (reg == reg2)
15340 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15341
15342 if (reg == rreg)
15343 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15344
9ebbca7d
GK
15345 real = copy_rtx (PATTERN (insn));
15346
89e7058f
AH
15347 if (reg2 != NULL_RTX)
15348 real = replace_rtx (real, reg2, rreg);
f676971a
EC
15349
15350 real = replace_rtx (real, reg,
9ebbca7d
GK
15351 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
15352 STACK_POINTER_REGNUM),
15353 GEN_INT (val)));
f676971a 15354
9ebbca7d
GK
15355 /* We expect that 'real' is either a SET or a PARALLEL containing
15356 SETs (and possibly other stuff). In a PARALLEL, all the SETs
15357 are important so they all have to be marked RTX_FRAME_RELATED_P. */
15358
15359 if (GET_CODE (real) == SET)
15360 {
15361 rtx set = real;
f676971a 15362
9ebbca7d
GK
15363 temp = simplify_rtx (SET_SRC (set));
15364 if (temp)
15365 SET_SRC (set) = temp;
15366 temp = simplify_rtx (SET_DEST (set));
15367 if (temp)
15368 SET_DEST (set) = temp;
15369 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 15370 {
9ebbca7d
GK
15371 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15372 if (temp)
15373 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 15374 }
38c1f2d7 15375 }
37409796 15376 else
9ebbca7d
GK
15377 {
15378 int i;
37409796
NS
15379
15380 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
15381 for (i = 0; i < XVECLEN (real, 0); i++)
15382 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
15383 {
15384 rtx set = XVECEXP (real, 0, i);
f676971a 15385
9ebbca7d
GK
15386 temp = simplify_rtx (SET_SRC (set));
15387 if (temp)
15388 SET_SRC (set) = temp;
15389 temp = simplify_rtx (SET_DEST (set));
15390 if (temp)
15391 SET_DEST (set) = temp;
15392 if (GET_CODE (SET_DEST (set)) == MEM)
15393 {
15394 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15395 if (temp)
15396 XEXP (SET_DEST (set), 0) = temp;
15397 }
15398 RTX_FRAME_RELATED_P (set) = 1;
15399 }
15400 }
c19de7aa 15401
9ebbca7d
GK
15402 RTX_FRAME_RELATED_P (insn) = 1;
15403 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15404 real,
15405 REG_NOTES (insn));
38c1f2d7
MM
15406}
15407
00b960c7
AH
15408/* Returns an insn that has a vrsave set operation with the
15409 appropriate CLOBBERs. */
15410
15411static rtx
a2369ed3 15412generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
15413{
15414 int nclobs, i;
15415 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 15416 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 15417
a004eb82
AH
15418 clobs[0]
15419 = gen_rtx_SET (VOIDmode,
15420 vrsave,
15421 gen_rtx_UNSPEC_VOLATILE (SImode,
15422 gen_rtvec (2, reg, vrsave),
3aca4bff 15423 UNSPECV_SET_VRSAVE));
00b960c7
AH
15424
15425 nclobs = 1;
15426
9aa86737
AH
15427 /* We need to clobber the registers in the mask so the scheduler
15428 does not move sets to VRSAVE before sets of AltiVec registers.
15429
15430 However, if the function receives nonlocal gotos, reload will set
15431 all call saved registers live. We will end up with:
15432
15433 (set (reg 999) (mem))
15434 (parallel [ (set (reg vrsave) (unspec blah))
15435 (clobber (reg 999))])
15436
15437 The clobber will cause the store into reg 999 to be dead, and
15438 flow will attempt to delete an epilogue insn. In this case, we
15439 need an unspec use/set of the register. */
00b960c7
AH
15440
15441 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 15442 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
15443 {
15444 if (!epiloguep || call_used_regs [i])
15445 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15446 gen_rtx_REG (V4SImode, i));
15447 else
15448 {
15449 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
15450
15451 clobs[nclobs++]
a004eb82
AH
15452 = gen_rtx_SET (VOIDmode,
15453 reg,
15454 gen_rtx_UNSPEC (V4SImode,
15455 gen_rtvec (1, reg), 27));
9aa86737
AH
15456 }
15457 }
00b960c7
AH
15458
15459 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15460
15461 for (i = 0; i < nclobs; ++i)
15462 XVECEXP (insn, 0, i) = clobs[i];
15463
15464 return insn;
15465}
15466
89e7058f
AH
15467/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15468 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
15469
15470static void
f676971a 15471emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 15472 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
15473{
15474 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15475 rtx replacea, replaceb;
15476
15477 int_rtx = GEN_INT (offset);
15478
15479 /* Some cases that need register indexed addressing. */
15480 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4d4447b5 15481 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == DDmode))
a3170dc6
AH
15482 || (TARGET_SPE_ABI
15483 && SPE_VECTOR_MODE (mode)
15484 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
15485 {
15486      /* Whoever calls us must make sure r11 is available in the
c4ad648e 15487 flow path of instructions in the prologue. */
89e7058f
AH
15488 offset_rtx = gen_rtx_REG (Pmode, 11);
15489 emit_move_insn (offset_rtx, int_rtx);
15490
15491 replacea = offset_rtx;
15492 replaceb = int_rtx;
15493 }
15494 else
15495 {
15496 offset_rtx = int_rtx;
15497 replacea = NULL_RTX;
15498 replaceb = NULL_RTX;
15499 }
15500
15501 reg = gen_rtx_REG (mode, regno);
15502 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 15503 mem = gen_frame_mem (mode, addr);
89e7058f
AH
15504
15505 insn = emit_move_insn (mem, reg);
15506
15507 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15508}
15509
a3170dc6
AH
15510/* Emit an offset memory reference suitable for a frame store, while
15511 converting to a valid addressing mode. */
15512
15513static rtx
a2369ed3 15514gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
15515{
15516 rtx int_rtx, offset_rtx;
15517
15518 int_rtx = GEN_INT (offset);
15519
4d4cbc0e 15520 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
4d4447b5 15521 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == DDmode)))
a3170dc6
AH
15522 {
15523 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15524 emit_move_insn (offset_rtx, int_rtx);
15525 }
15526 else
15527 offset_rtx = int_rtx;
15528
0be76840 15529 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
15530}
15531
6d0a8091
DJ
15532/* Look for user-defined global regs. We should not save and restore these,
15533   and cannot use stmw/lmw if there are any within the range they would cover.  */
15534
15535static bool
15536no_global_regs_above (int first_greg)
15537{
15538 int i;
15539 for (i = 0; i < 32 - first_greg; i++)
15540 if (global_regs[first_greg + i])
15541 return false;
15542 return true;
15543}
15544
699c914a
MS
15545#ifndef TARGET_FIX_AND_CONTINUE
15546#define TARGET_FIX_AND_CONTINUE 0
15547#endif
15548
52ff33d0
NF
15549/* Determine whether the gp REG is really used. */
15550
15551static bool
15552rs6000_reg_live_or_pic_offset_p (int reg)
15553{
6fb5fa3c 15554 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
15555 && (!call_used_regs[reg]
15556 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15557 && TARGET_TOC && TARGET_MINIMAL_TOC)))
15558 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15559 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
15560 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
15561}
15562
9ebbca7d
GK
15563/* Emit function prologue as insns. */
15564
9878760c 15565void
863d938c 15566rs6000_emit_prologue (void)
9878760c 15567{
4697a36c 15568 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 15569 enum machine_mode reg_mode = Pmode;
327e5343 15570 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
15571 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15572 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
15573 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 15574 rtx cr_save_rtx = NULL_RTX;
9ebbca7d
GK
15575 rtx insn;
15576 int saving_FPRs_inline;
15577 int using_store_multiple;
15578 HOST_WIDE_INT sp_offset = 0;
f676971a 15579
699c914a
MS
15580 if (TARGET_FIX_AND_CONTINUE)
15581 {
15582 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 15583 address by modifying the first 5 instructions of the function
699c914a
MS
15584 to branch to the overriding function. This is necessary to
15585 permit function pointers that point to the old function to
15586 actually forward to the new function. */
15587 emit_insn (gen_nop ());
15588 emit_insn (gen_nop ());
de2ab0ca 15589 emit_insn (gen_nop ());
699c914a
MS
15590 emit_insn (gen_nop ());
15591 emit_insn (gen_nop ());
15592 }
15593
15594 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
15595 {
15596 reg_mode = V2SImode;
15597 reg_size = 8;
15598 }
a3170dc6 15599
9ebbca7d 15600 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
15601 && (!TARGET_SPE_ABI
15602 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
15603 && info->first_gp_reg_save < 31
15604 && no_global_regs_above (info->first_gp_reg_save));
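  /* When using_store_multiple holds, the GPR saves below are emitted as a
     single PARALLEL matching the 32-bit store-multiple pattern, i.e. one
     "stmw" storing first_gp_reg_save..r31 in a single instruction.  */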
9ebbca7d 15605 saving_FPRs_inline = (info->first_fp_reg_save == 64
8c29550d 15606 || FP_SAVE_INLINE (info->first_fp_reg_save)
acd0b319 15607 || current_function_calls_eh_return
8c29550d 15608 || cfun->machine->ra_need_lr);
9ebbca7d
GK
15609
15610 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
15611 if (! WORLD_SAVE_P (info)
15612 && info->push_p
acd0b319
AM
15613 && (DEFAULT_ABI == ABI_V4
15614 || current_function_calls_eh_return))
9ebbca7d
GK
15615 {
15616 if (info->total_size < 32767)
15617 sp_offset = info->total_size;
15618 else
15619 frame_reg_rtx = frame_ptr_rtx;
f676971a 15620 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
15621 (frame_reg_rtx != sp_reg_rtx
15622 && (info->cr_save_p
15623 || info->lr_save_p
15624 || info->first_fp_reg_save < 64
15625 || info->first_gp_reg_save < 32
15626 )));
15627 if (frame_reg_rtx != sp_reg_rtx)
15628 rs6000_emit_stack_tie ();
15629 }
15630
d62294f5 15631 /* Handle world saves specially here. */
f57fe068 15632 if (WORLD_SAVE_P (info))
d62294f5
FJ
15633 {
15634 int i, j, sz;
15635 rtx treg;
15636 rtvec p;
22fa69da 15637 rtx reg0;
d62294f5
FJ
15638
15639 /* save_world expects lr in r0. */
22fa69da 15640 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 15641 if (info->lr_save_p)
c4ad648e 15642 {
22fa69da 15643 insn = emit_move_insn (reg0,
1de43f85 15644 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
15645 RTX_FRAME_RELATED_P (insn) = 1;
15646 }
d62294f5
FJ
15647
15648 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 15649 assumptions about the offsets of various bits of the stack
992d08b1 15650 frame. */
37409796
NS
15651 gcc_assert (info->gp_save_offset == -220
15652 && info->fp_save_offset == -144
15653 && info->lr_save_offset == 8
15654 && info->cr_save_offset == 4
15655 && info->push_p
15656 && info->lr_save_p
15657 && (!current_function_calls_eh_return
15658 || info->ehrd_offset == -432)
15659 && info->vrsave_save_offset == -224
22fa69da 15660 && info->altivec_save_offset == -416);
d62294f5
FJ
15661
15662 treg = gen_rtx_REG (SImode, 11);
15663 emit_move_insn (treg, GEN_INT (-info->total_size));
15664
15665 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 15666 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
15667
15668 /* Preserve CR2 for save_world prologues */
22fa69da 15669 sz = 5;
d62294f5
FJ
15670 sz += 32 - info->first_gp_reg_save;
15671 sz += 64 - info->first_fp_reg_save;
15672 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
15673 p = rtvec_alloc (sz);
15674 j = 0;
15675 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 15676 gen_rtx_REG (SImode,
1de43f85 15677 LR_REGNO));
d62294f5 15678 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
15679 gen_rtx_SYMBOL_REF (Pmode,
15680 "*save_world"));
d62294f5 15681 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
15682 properly. */
15683 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15684 {
15685 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15686 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15687 GEN_INT (info->fp_save_offset
15688 + sp_offset + 8 * i));
0be76840 15689 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
15690
15691 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15692 }
d62294f5 15693 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
15694 {
15695 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
15696 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15697 GEN_INT (info->altivec_save_offset
15698 + sp_offset + 16 * i));
0be76840 15699 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
15700
15701 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15702 }
d62294f5 15703 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
15704 {
15705 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15706 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15707 GEN_INT (info->gp_save_offset
15708 + sp_offset + reg_size * i));
0be76840 15709 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15710
15711 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15712 }
15713
15714 {
15715 /* CR register traditionally saved as CR2. */
15716 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
15717 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15718 GEN_INT (info->cr_save_offset
15719 + sp_offset));
0be76840 15720 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15721
15722 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15723 }
22fa69da
GK
15724      /* Explain the use of R0.  */
15725 if (info->lr_save_p)
15726 {
15727 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15728 GEN_INT (info->lr_save_offset
15729 + sp_offset));
15730 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 15731
22fa69da
GK
15732 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
15733 }
15734 /* Explain what happens to the stack pointer. */
15735 {
15736 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
15737 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
15738 }
d62294f5
FJ
15739
15740 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
15741 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
15742 treg, GEN_INT (-info->total_size));
15743 sp_offset = info->total_size;
d62294f5
FJ
15744 }
15745
9ebbca7d 15746 /* If we use the link register, get it into r0. */
f57fe068 15747 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 15748 {
52ff33d0
NF
15749 rtx addr, reg, mem;
15750
f8a57be8 15751 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 15752 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 15753 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
15754
15755 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15756 GEN_INT (info->lr_save_offset + sp_offset));
15757 reg = gen_rtx_REG (Pmode, 0);
15758 mem = gen_rtx_MEM (Pmode, addr);
15759 /* This should not be of rs6000_sr_alias_set, because of
15760 __builtin_return_address. */
15761
15762 insn = emit_move_insn (mem, reg);
15763 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15764 NULL_RTX, NULL_RTX);
f8a57be8 15765 }
9ebbca7d
GK
15766
15767 /* If we need to save CR, put it into r12. */
f57fe068 15768 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 15769 {
f8a57be8 15770 rtx set;
f676971a 15771
9ebbca7d 15772 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
15773 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15774 RTX_FRAME_RELATED_P (insn) = 1;
15775 /* Now, there's no way that dwarf2out_frame_debug_expr is going
15776 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
15777 But that's OK. All we have to do is specify that _one_ condition
15778 code register is saved in this stack slot. The thrower's epilogue
15779 will then restore all the call-saved registers.
15780 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
15781 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
15782 gen_rtx_REG (SImode, CR2_REGNO));
15783 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15784 set,
15785 REG_NOTES (insn));
9ebbca7d
GK
15786 }
15787
a4f6c312
SS
15788  /* Do any required saving of fpr's.  If there are only one or two to
15789     save, do it ourselves.  Otherwise, call a helper function.  */
f57fe068 15790 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
15791 {
15792 int i;
15793 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 15794 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 15795 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
15796 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
15797 info->first_fp_reg_save + i,
15798 info->fp_save_offset + sp_offset + 8 * i,
15799 info->total_size);
9ebbca7d 15800 }
f57fe068 15801 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
9ebbca7d
GK
15802 {
15803 int i;
15804 char rname[30];
520a57c8 15805 const char *alloc_rname;
9ebbca7d
GK
15806 rtvec p;
15807 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
f676971a
EC
15808
15809 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
15810 gen_rtx_REG (Pmode,
1de43f85 15811 LR_REGNO));
9ebbca7d
GK
15812 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
15813 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
a8a05998 15814 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
15815 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15816 gen_rtx_SYMBOL_REF (Pmode,
15817 alloc_rname));
15818 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15819 {
15820 rtx addr, reg, mem;
15821 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15822 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a 15823 GEN_INT (info->fp_save_offset
9ebbca7d 15824 + sp_offset + 8*i));
0be76840 15825 mem = gen_frame_mem (DFmode, addr);
9ebbca7d
GK
15826
15827 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
15828 }
15829 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15830 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
15831 NULL_RTX, NULL_RTX);
15832 }
b6c9286a 15833
9ebbca7d
GK
15834 /* Save GPRs. This is done as a PARALLEL if we are using
15835 the store-multiple instructions. */
f57fe068 15836 if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 15837 {
308c142a 15838 rtvec p;
9ebbca7d
GK
15839 int i;
15840 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
15841 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15842 {
15843 rtx addr, reg, mem;
15844 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
15845 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15846 GEN_INT (info->gp_save_offset
15847 + sp_offset
9ebbca7d 15848 + reg_size * i));
0be76840 15849 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
15850
15851 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
15852 }
15853 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15854 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 15855 NULL_RTX, NULL_RTX);
b6c9286a 15856 }
52ff33d0
NF
15857 else if (!WORLD_SAVE_P (info)
15858 && TARGET_SPE_ABI
15859 && info->spe_64bit_regs_used != 0
15860 && info->first_gp_reg_save != 32)
15861 {
15862 int i;
15863 rtx spe_save_area_ptr;
15864 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
6fb5fa3c 15865 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
52ff33d0
NF
15866 && !call_used_regs[STATIC_CHAIN_REGNUM]);
15867
15868 /* Determine whether we can address all of the registers that need
15869 to be saved with an offset from the stack pointer that fits in
15870 the small const field for SPE memory instructions. */
15871 int spe_regs_addressable_via_sp
15872 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
15873 + (32 - info->first_gp_reg_save - 1) * reg_size);
15874 int spe_offset;
15875
15876 if (spe_regs_addressable_via_sp)
15877 {
30895f30 15878 spe_save_area_ptr = frame_reg_rtx;
52ff33d0
NF
15879 spe_offset = info->spe_gp_save_offset + sp_offset;
15880 }
15881 else
15882 {
15883 /* Make r11 point to the start of the SPE save area. We need
15884 to be careful here if r11 is holding the static chain. If
15885 it is, then temporarily save it in r0. We would use r0 as
15886 our base register here, but using r0 as a base register in
15887 loads and stores means something different from what we
15888 would like. */
15889 if (using_static_chain_p)
15890 {
15891 rtx r0 = gen_rtx_REG (Pmode, 0);
15892
15893 gcc_assert (info->first_gp_reg_save > 11);
15894
15895 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
15896 }
15897
15898 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
30895f30 15899 emit_insn (gen_addsi3 (spe_save_area_ptr, frame_reg_rtx,
52ff33d0
NF
15900 GEN_INT (info->spe_gp_save_offset + sp_offset)));
15901
15902 spe_offset = 0;
15903 }
15904
15905 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15906 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15907 {
15908 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15909 rtx offset, addr, mem;
15910
15911 /* We're doing all this to ensure that the offset fits into
15912 the immediate offset of 'evstdd'. */
15913 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
15914
15915 offset = GEN_INT (reg_size * i + spe_offset);
15916 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
15917 mem = gen_rtx_MEM (V2SImode, addr);
15918
15919 insn = emit_move_insn (mem, reg);
15920
15921 rs6000_frame_related (insn, spe_save_area_ptr,
15922 info->spe_gp_save_offset
15923 + sp_offset + reg_size * i,
15924 offset, const0_rtx);
15925 }
15926
15927 /* Move the static chain pointer back. */
15928 if (using_static_chain_p && !spe_regs_addressable_via_sp)
15929 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
15930 }
f57fe068 15931 else if (!WORLD_SAVE_P (info))
b6c9286a 15932 {
9ebbca7d
GK
15933 int i;
15934 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
15935 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15936 {
15937 rtx addr, reg, mem;
15938 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 15939
52ff33d0
NF
15940 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15941 GEN_INT (info->gp_save_offset
15942 + sp_offset
15943 + reg_size * i));
15944 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 15945
52ff33d0
NF
15946 insn = emit_move_insn (mem, reg);
15947 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15948 NULL_RTX, NULL_RTX);
15949 }
9ebbca7d
GK
15950 }
15951
83720594
RH
15952 /* ??? There's no need to emit actual instructions here, but it's the
15953 easiest way to get the frame unwind information emitted. */
22fa69da 15954 if (current_function_calls_eh_return)
83720594 15955 {
78e1b90d
DE
15956 unsigned int i, regno;
15957
fc4767bb
JJ
15958      /* In the AIX ABI we need to pretend we save r2 here.  */
15959 if (TARGET_AIX)
15960 {
15961 rtx addr, reg, mem;
15962
15963 reg = gen_rtx_REG (reg_mode, 2);
15964 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15965 GEN_INT (sp_offset + 5 * reg_size));
0be76840 15966 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
15967
15968 insn = emit_move_insn (mem, reg);
f676971a 15969 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
15970 NULL_RTX, NULL_RTX);
15971 PATTERN (insn) = gen_blockage ();
15972 }
15973
83720594
RH
15974 for (i = 0; ; ++i)
15975 {
83720594
RH
15976 regno = EH_RETURN_DATA_REGNO (i);
15977 if (regno == INVALID_REGNUM)
15978 break;
15979
89e7058f
AH
15980 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
15981 info->ehrd_offset + sp_offset
15982 + reg_size * (int) i,
15983 info->total_size);
83720594
RH
15984 }
15985 }
15986
9ebbca7d 15987 /* Save CR if we use any that must be preserved. */
f57fe068 15988 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
15989 {
15990 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15991 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 15992 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
15993 /* See the large comment above about why CR2_REGNO is used. */
15994 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 15995
9ebbca7d
GK
15996 /* If r12 was used to hold the original sp, copy cr into r0 now
15997 that it's free. */
15998 if (REGNO (frame_reg_rtx) == 12)
15999 {
f8a57be8
GK
16000 rtx set;
16001
9ebbca7d 16002 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
16003 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16004 RTX_FRAME_RELATED_P (insn) = 1;
16005 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
16006 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
16007 set,
16008 REG_NOTES (insn));
f676971a 16009
9ebbca7d
GK
16010 }
16011 insn = emit_move_insn (mem, cr_save_rtx);
16012
f676971a 16013 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 16014 NULL_RTX, NULL_RTX);
9ebbca7d
GK
16015 }
16016
f676971a 16017 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 16018 for which it was done previously. */
f57fe068 16019 if (!WORLD_SAVE_P (info) && info->push_p
fc4767bb 16020 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
2b2c2fe5 16021 {
bcb2d701 16022 if (info->total_size < 32767)
2b2c2fe5 16023 sp_offset = info->total_size;
bcb2d701
EC
16024 else
16025 frame_reg_rtx = frame_ptr_rtx;
16026 rs6000_emit_allocate_stack (info->total_size,
16027 (frame_reg_rtx != sp_reg_rtx
16028 && ((info->altivec_size != 0)
16029 || (info->vrsave_mask != 0)
16030 )));
16031 if (frame_reg_rtx != sp_reg_rtx)
16032 rs6000_emit_stack_tie ();
2b2c2fe5 16033 }
9ebbca7d
GK
16034
16035 /* Set frame pointer, if needed. */
16036 if (frame_pointer_needed)
16037 {
7d5175e1 16038 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
16039 sp_reg_rtx);
16040 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 16041 }
9878760c 16042
2b2c2fe5
EC
16043 /* Save AltiVec registers if needed. Save here because the red zone does
16044 not include AltiVec registers. */
16045 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16046 {
16047 int i;
16048
16049      /* There should be a non-inline version of this, for when we
16050 are saving lots of vector registers. */
16051 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16052 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16053 {
16054 rtx areg, savereg, mem;
16055 int offset;
16056
16057 offset = info->altivec_save_offset + sp_offset
16058 + 16 * (i - info->first_altivec_reg_save);
16059
16060 savereg = gen_rtx_REG (V4SImode, i);
16061
16062 areg = gen_rtx_REG (Pmode, 0);
16063 emit_move_insn (areg, GEN_INT (offset));
16064
16065 /* AltiVec addressing mode is [reg+reg]. */
16066 mem = gen_frame_mem (V4SImode,
16067 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
16068
16069 insn = emit_move_insn (mem, savereg);
16070
16071 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16072 areg, GEN_INT (offset));
16073 }
16074 }
16075
16076 /* VRSAVE is a bit vector representing which AltiVec registers
16077 are used. The OS uses this to determine which vector
16078 registers to save on a context switch. We need to save
16079 VRSAVE on the stack frame, add whatever AltiVec registers we
16080 used in this function, and do the corresponding magic in the
16081 epilogue. */
16082
16083 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16084 && info->vrsave_mask != 0)
16085 {
16086 rtx reg, mem, vrsave;
16087 int offset;
16088
16089 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
16090 as frame_reg_rtx and r11 as the static chain pointer for
16091 nested functions. */
16092 reg = gen_rtx_REG (SImode, 0);
16093 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
16094 if (TARGET_MACHO)
16095 emit_insn (gen_get_vrsave_internal (reg));
16096 else
16097 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
16098
16099 if (!WORLD_SAVE_P (info))
16100 {
16101 /* Save VRSAVE. */
16102 offset = info->vrsave_save_offset + sp_offset;
16103 mem = gen_frame_mem (SImode,
16104 gen_rtx_PLUS (Pmode, frame_reg_rtx,
16105 GEN_INT (offset)));
16106 insn = emit_move_insn (mem, reg);
16107 }
16108
16109 /* Include the registers in the mask. */
16110 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
16111
16112 insn = emit_insn (generate_set_vrsave (reg, info, 0));
16113 }
16114
1db02437 16115 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 16116 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
16117 || (DEFAULT_ABI == ABI_V4
16118 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 16119 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
16120 {
16121 /* If emit_load_toc_table will use the link register, we need to save
16122 it. We use R12 for this purpose because emit_load_toc_table
16123 can use register 0. This allows us to use a plain 'blr' to return
16124 from the procedure more often. */
16125 int save_LR_around_toc_setup = (TARGET_ELF
16126 && DEFAULT_ABI != ABI_AIX
16127 && flag_pic
16128 && ! info->lr_save_p
16129 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
16130 if (save_LR_around_toc_setup)
16131 {
1de43f85 16132 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 16133
c4ad648e 16134 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 16135 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 16136
c4ad648e 16137 rs6000_emit_load_toc_table (TRUE);
f8a57be8 16138
c4ad648e 16139 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
16140 RTX_FRAME_RELATED_P (insn) = 1;
16141 }
16142 else
16143 rs6000_emit_load_toc_table (TRUE);
16144 }
ee890fe2 16145
fcce224d 16146#if TARGET_MACHO
ee890fe2
SS
16147 if (DEFAULT_ABI == ABI_DARWIN
16148 && flag_pic && current_function_uses_pic_offset_table)
16149 {
1de43f85 16150 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
11abc112 16151 rtx src = machopic_function_base_sym ();
ee890fe2 16152
6d0a8091
DJ
16153 /* Save and restore LR locally around this call (in R0). */
16154 if (!info->lr_save_p)
6fb5fa3c 16155 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 16156
6fb5fa3c 16157 emit_insn (gen_load_macho_picbase (src));
ee890fe2 16158
6fb5fa3c
DB
16159 emit_move_insn (gen_rtx_REG (Pmode,
16160 RS6000_PIC_OFFSET_TABLE_REGNUM),
16161 lr);
6d0a8091
DJ
16162
16163 if (!info->lr_save_p)
6fb5fa3c 16164 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 16165 }
fcce224d 16166#endif
9ebbca7d
GK
16167}
16168
9ebbca7d 16169/* Write function prologue. */
a4f6c312 16170
08c148a8 16171static void
f676971a 16172rs6000_output_function_prologue (FILE *file,
a2369ed3 16173 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
16174{
16175 rs6000_stack_t *info = rs6000_stack_info ();
16176
4697a36c
MM
16177 if (TARGET_DEBUG_STACK)
16178 debug_stack_info (info);
9878760c 16179
a4f6c312
SS
16180 /* Write .extern for any function we will call to save and restore
16181 fp values. */
16182 if (info->first_fp_reg_save < 64
16183 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 16184 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 16185 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
a4f6c312
SS
16186 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
16187 RESTORE_FP_SUFFIX);
9878760c 16188
c764f757
RK
16189 /* Write .extern for AIX common mode routines, if needed. */
16190 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
16191 {
f6709c70
JW
16192 fputs ("\t.extern __mulh\n", file);
16193 fputs ("\t.extern __mull\n", file);
16194 fputs ("\t.extern __divss\n", file);
16195 fputs ("\t.extern __divus\n", file);
16196 fputs ("\t.extern __quoss\n", file);
16197 fputs ("\t.extern __quous\n", file);
c764f757
RK
16198 common_mode_defined = 1;
16199 }
9878760c 16200
9ebbca7d 16201 if (! HAVE_prologue)
979721f8 16202 {
9ebbca7d 16203 start_sequence ();
9dda4cc8 16204
a4f6c312
SS
16205 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
16206 the "toplevel" insn chain. */
2e040219 16207 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16208 rs6000_emit_prologue ();
2e040219 16209 emit_note (NOTE_INSN_DELETED);
178c3eff 16210
a3c9585f 16211 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16212 {
16213 rtx insn;
16214 unsigned addr = 0;
16215 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16216 {
16217 INSN_ADDRESSES_NEW (insn, addr);
16218 addr += 4;
16219 }
16220 }
9dda4cc8 16221
9ebbca7d 16222 if (TARGET_DEBUG_STACK)
a4f6c312 16223 debug_rtx_list (get_insns (), 100);
c9d691e9 16224 final (get_insns (), file, FALSE);
9ebbca7d 16225 end_sequence ();
979721f8
MM
16226 }
16227
9ebbca7d
GK
16228 rs6000_pic_labelno++;
16229}
f676971a 16230
9ebbca7d 16231/* Emit function epilogue as insns.
9878760c 16232
9ebbca7d
GK
16233 At present, dwarf2out_frame_debug_expr doesn't understand
16234 register restores, so we don't bother setting RTX_FRAME_RELATED_P
16235 anywhere in the epilogue. Most of the insns below would in any case
16236 need special notes to explain where r11 is in relation to the stack. */
9878760c 16237
9ebbca7d 16238void
a2369ed3 16239rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
16240{
16241 rs6000_stack_t *info;
16242 int restoring_FPRs_inline;
16243 int using_load_multiple;
d296e02e 16244 int using_mtcr_multiple;
9ebbca7d
GK
16245 int use_backchain_to_restore_sp;
16246 int sp_offset = 0;
16247 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
16248 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 16249 enum machine_mode reg_mode = Pmode;
327e5343 16250 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16251 int i;
16252
c19de7aa
AH
16253 info = rs6000_stack_info ();
16254
16255 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
16256 {
16257 reg_mode = V2SImode;
16258 reg_size = 8;
16259 }
16260
9ebbca7d 16261 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
16262 && (!TARGET_SPE_ABI
16263 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
16264 && info->first_gp_reg_save < 31
16265 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 16266 restoring_FPRs_inline = (sibcall
83720594 16267 || current_function_calls_eh_return
9ebbca7d
GK
16268 || info->first_fp_reg_save == 64
16269 || FP_SAVE_INLINE (info->first_fp_reg_save));
f676971a 16270 use_backchain_to_restore_sp = (frame_pointer_needed
9ebbca7d
GK
16271 || current_function_calls_alloca
16272 || info->total_size > 32767);
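  /* When use_backchain_to_restore_sp is set, the frame size is either not
     known at compile time (alloca), does not fit in a 16-bit immediate, or
     a frame pointer is in use, so the old stack pointer is reloaded from
     the back chain word at 0(r1) instead of being recomputed by adding the
     frame size back.  */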
d296e02e 16273 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
16274 || rs6000_cpu == PROCESSOR_PPC603
16275 || rs6000_cpu == PROCESSOR_PPC750
16276 || optimize_size);
16277
f57fe068 16278 if (WORLD_SAVE_P (info))
d62294f5
FJ
16279 {
16280 int i, j;
16281 char rname[30];
16282 const char *alloc_rname;
16283 rtvec p;
16284
16285 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
16286         stack slot (which is not likely to be our caller).
16287 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
16288 rest_world is similar, except any R10 parameter is ignored.
16289 The exception-handling stuff that was here in 2.95 is no
16290 longer necessary. */
d62294f5
FJ
16291
16292 p = rtvec_alloc (9
16293 + 1
f676971a 16294 + 32 - info->first_gp_reg_save
c4ad648e
AM
16295 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
16296 + 63 + 1 - info->first_fp_reg_save);
d62294f5 16297
c4ad648e
AM
16298 strcpy (rname, ((current_function_calls_eh_return) ?
16299 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
16300 alloc_rname = ggc_strdup (rname);
16301
16302 j = 0;
16303 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
16304 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 16305 gen_rtx_REG (Pmode,
1de43f85 16306 LR_REGNO));
d62294f5 16307 RTVEC_ELT (p, j++)
c4ad648e 16308 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 16309 /* The instruction pattern requires a clobber here;
c4ad648e 16310 it is shared with the restVEC helper. */
d62294f5 16311 RTVEC_ELT (p, j++)
c4ad648e 16312 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
16313
16314 {
c4ad648e
AM
16315 /* CR register traditionally saved as CR2. */
16316 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16317 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16318 GEN_INT (info->cr_save_offset));
0be76840 16319 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16320
16321 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
16322 }
16323
16324 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16325 {
16326 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16327 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16328 GEN_INT (info->gp_save_offset
16329 + reg_size * i));
0be76840 16330 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16331
16332 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16333 }
d62294f5 16334 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16335 {
16336 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16337 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16338 GEN_INT (info->altivec_save_offset
16339 + 16 * i));
0be76840 16340 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16341
16342 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16343 }
d62294f5 16344 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
16345 {
16346 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
16347 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16348 GEN_INT (info->fp_save_offset
16349 + 8 * i));
0be76840 16350 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
16351
16352 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16353 }
d62294f5 16354 RTVEC_ELT (p, j++)
c4ad648e 16355 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 16356 RTVEC_ELT (p, j++)
c4ad648e 16357 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 16358 RTVEC_ELT (p, j++)
c4ad648e 16359 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 16360 RTVEC_ELT (p, j++)
c4ad648e 16361 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 16362 RTVEC_ELT (p, j++)
c4ad648e 16363 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
16364 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
16365
16366 return;
16367 }
16368
45b194f8
AM
16369 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
16370 if (info->push_p)
2b2c2fe5 16371 sp_offset = info->total_size;
f676971a 16372
e6477eaa
AM
16373 /* Restore AltiVec registers if we must do so before adjusting the
16374 stack. */
16375 if (TARGET_ALTIVEC_ABI
16376 && info->altivec_size != 0
16377 && DEFAULT_ABI != ABI_V4
16378 && info->altivec_save_offset < (TARGET_32BIT ? -220 : -288))
9aa86737
AH
16379 {
16380 int i;
16381
e6477eaa
AM
16382 if (use_backchain_to_restore_sp)
16383 {
16384 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16385 emit_move_insn (frame_reg_rtx,
16386 gen_rtx_MEM (Pmode, sp_reg_rtx));
16387 sp_offset = 0;
16388 }
16389
9aa86737
AH
16390 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16391 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16392 {
16393 rtx addr, areg, mem;
16394
16395 areg = gen_rtx_REG (Pmode, 0);
16396 emit_move_insn
16397 (areg, GEN_INT (info->altivec_save_offset
16398 + sp_offset
16399 + 16 * (i - info->first_altivec_reg_save)));
16400
16401 /* AltiVec addressing mode is [reg+reg]. */
16402 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 16403 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
16404
16405 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
16406 }
16407 }
16408
e6477eaa
AM
16409 /* Restore VRSAVE if we must do so before adjusting the stack. */
16410 if (TARGET_ALTIVEC
16411 && TARGET_ALTIVEC_VRSAVE
16412 && info->vrsave_mask != 0
16413 && DEFAULT_ABI != ABI_V4
16414 && info->vrsave_save_offset < (TARGET_32BIT ? -220 : -288))
16415 {
16416 rtx addr, mem, reg;
16417
16418 if (use_backchain_to_restore_sp
16419 && frame_reg_rtx == sp_reg_rtx)
16420 {
16421 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16422 emit_move_insn (frame_reg_rtx,
16423 gen_rtx_MEM (Pmode, sp_reg_rtx));
16424 sp_offset = 0;
16425 }
16426
16427 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16428 GEN_INT (info->vrsave_save_offset + sp_offset));
16429 mem = gen_frame_mem (SImode, addr);
16430 reg = gen_rtx_REG (SImode, 12);
16431 emit_move_insn (reg, mem);
16432
16433 emit_insn (generate_set_vrsave (reg, info, 1));
16434 }
16435
2b2c2fe5
EC
16436 /* If we have a frame pointer, a call to alloca, or a large stack
16437 frame, restore the old stack pointer using the backchain. Otherwise,
16438 we know what size to update it with. */
16439 if (use_backchain_to_restore_sp)
16440 {
e6477eaa
AM
16441 if (frame_reg_rtx != sp_reg_rtx)
16442 {
16443 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
16444 frame_reg_rtx = sp_reg_rtx;
16445 }
16446 else
16447 {
16448 /* Under V.4, don't reset the stack pointer until after we're done
16449 loading the saved registers. */
16450 if (DEFAULT_ABI == ABI_V4)
16451 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16452
16453 emit_move_insn (frame_reg_rtx,
16454 gen_rtx_MEM (Pmode, sp_reg_rtx));
16455 sp_offset = 0;
16456 }
2b2c2fe5 16457 }
45b194f8
AM
16458 else if (info->push_p
16459 && DEFAULT_ABI != ABI_V4
16460 && !current_function_calls_eh_return)
2b2c2fe5 16461 {
45b194f8
AM
16462 emit_insn (TARGET_32BIT
16463 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16464 GEN_INT (info->total_size))
16465 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16466 GEN_INT (info->total_size)));
16467 sp_offset = 0;
2b2c2fe5
EC
16468 }
16469
e6477eaa
AM
16470 /* Restore AltiVec registers if we have not done so already. */
16471 if (TARGET_ALTIVEC_ABI
16472 && info->altivec_size != 0
16473 && (DEFAULT_ABI == ABI_V4
16474 || info->altivec_save_offset >= (TARGET_32BIT ? -220 : -288)))
16475 {
16476 int i;
16477
16478 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16479 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16480 {
16481 rtx addr, areg, mem;
16482
16483 areg = gen_rtx_REG (Pmode, 0);
16484 emit_move_insn
16485 (areg, GEN_INT (info->altivec_save_offset
16486 + sp_offset
16487 + 16 * (i - info->first_altivec_reg_save)));
16488
16489 /* AltiVec addressing mode is [reg+reg]. */
16490 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
16491 mem = gen_frame_mem (V4SImode, addr);
16492
16493 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
16494 }
16495 }
16496
16497 /* Restore VRSAVE if we have not done so already. */
16498 if (TARGET_ALTIVEC
16499 && TARGET_ALTIVEC_VRSAVE
16500 && info->vrsave_mask != 0
16501 && (DEFAULT_ABI == ABI_V4
16502 || info->vrsave_save_offset >= (TARGET_32BIT ? -220 : -288)))
16503 {
16504 rtx addr, mem, reg;
16505
16506 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16507 GEN_INT (info->vrsave_save_offset + sp_offset));
16508 mem = gen_frame_mem (SImode, addr);
16509 reg = gen_rtx_REG (SImode, 12);
16510 emit_move_insn (reg, mem);
16511
16512 emit_insn (generate_set_vrsave (reg, info, 1));
16513 }
16514
9ebbca7d
GK
16515 /* Get the old lr if we saved it. */
16516 if (info->lr_save_p)
b6c9286a 16517 {
a3170dc6
AH
16518 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
16519 info->lr_save_offset + sp_offset);
ba4828e0 16520
9ebbca7d 16521 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 16522 }
f676971a 16523
9ebbca7d
GK
16524 /* Get the old cr if we saved it. */
16525 if (info->cr_save_p)
16526 {
16527 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16528 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16529 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 16530
9ebbca7d
GK
16531 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
16532 }
f676971a 16533
9ebbca7d 16534 /* Set LR here to try to overlap restores below. */
4697a36c 16535 if (info->lr_save_p)
1de43f85 16536 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 16537 gen_rtx_REG (Pmode, 0));
f676971a 16538
83720594
RH
16539 /* Load exception handler data registers, if needed. */
16540 if (current_function_calls_eh_return)
16541 {
78e1b90d
DE
16542 unsigned int i, regno;
16543
fc4767bb
JJ
16544 if (TARGET_AIX)
16545 {
16546 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16547 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16548 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16549
16550 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
16551 }
16552
83720594
RH
16553 for (i = 0; ; ++i)
16554 {
a3170dc6 16555 rtx mem;
83720594
RH
16556
16557 regno = EH_RETURN_DATA_REGNO (i);
16558 if (regno == INVALID_REGNUM)
16559 break;
16560
a3170dc6
AH
16561 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
16562 info->ehrd_offset + sp_offset
16563 + reg_size * (int) i);
83720594
RH
16564
16565 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
16566 }
16567 }
f676971a 16568
9ebbca7d
GK
16569 /* Restore GPRs. This is done as a PARALLEL if we are using
16570 the load-multiple instructions. */
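     /* (On 32-bit targets the PARALLEL built below normally matches the
	load-multiple pattern and ends up as a single lmw reloading
	first_gp_reg_save..r31 from consecutive stack slots.)  */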
16571 if (using_load_multiple)
979721f8 16572 {
9ebbca7d
GK
16573 rtvec p;
16574 p = rtvec_alloc (32 - info->first_gp_reg_save);
16575 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
979721f8 16576 {
f676971a
EC
16577 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16578 GEN_INT (info->gp_save_offset
16579 + sp_offset
9ebbca7d 16580 + reg_size * i));
0be76840 16581 rtx mem = gen_frame_mem (reg_mode, addr);
9ebbca7d 16582
f676971a 16583 RTVEC_ELT (p, i) =
16584 gen_rtx_SET (VOIDmode,
16585 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16586 mem);
979721f8 16587 }
9ebbca7d 16588 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
979721f8 16589 }
16590 else if (TARGET_SPE_ABI
16591 && info->spe_64bit_regs_used != 0
16592 && info->first_gp_reg_save != 32)
16593 {
52ff33d0
NF
16594 /* Determine whether we can address all of the registers that need
16595 to be saved with an offset from the stack pointer that fits in
16596 the small const field for SPE memory instructions. */
16597 int spe_regs_addressable_via_sp
16598 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16599 + (32 - info->first_gp_reg_save - 1) * reg_size);
16600 int spe_offset;
16601
16602 if (spe_regs_addressable_via_sp)
45b194f8 16603 spe_offset = info->spe_gp_save_offset + sp_offset;
16604 else
16605 {
45b194f8 16606 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 16607 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 16608 not clobbering it when we were saving registers in the prologue.
16609 There's no need to worry here because the static chain is passed
16610 anew to every function. */
45b194f8
AM
16611 if (frame_reg_rtx == sp_reg_rtx)
16612 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16613 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
52ff33d0 16614 GEN_INT (info->spe_gp_save_offset + sp_offset)));
45b194f8
AM
16615 /* Keep the invariant that frame_reg_rtx + sp_offset points
16616 at the top of the stack frame. */
16617 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
16618
16619 spe_offset = 0;
16620 }
16621
16622 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16623 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16624 {
16625 rtx offset, addr, mem;
16626
16627 /* We're doing all this to ensure that the immediate offset
16628 fits into the immediate field of 'evldd'. */
16629 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
16630
16631 offset = GEN_INT (spe_offset + reg_size * i);
45b194f8 16632 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
52ff33d0
NF
16633 mem = gen_rtx_MEM (V2SImode, addr);
16634
16635 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16636 mem);
16637 }
16638 }
9ebbca7d
GK
16639 else
16640 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0 16641 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
9ebbca7d 16642 {
f676971a
EC
16643 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16644 GEN_INT (info->gp_save_offset
16645 + sp_offset
9ebbca7d 16646 + reg_size * i));
0be76840 16647 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 16648
f676971a 16649 emit_move_insn (gen_rtx_REG (reg_mode,
a3170dc6 16650 info->first_gp_reg_save + i), mem);
9ebbca7d 16651 }
9878760c 16652
9ebbca7d
GK
16653 /* Restore fpr's if we need to do it without calling a function. */
16654 if (restoring_FPRs_inline)
16655 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16656 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
16657 && ! call_used_regs[info->first_fp_reg_save+i]))
16658 {
16659 rtx addr, mem;
16660 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16661 GEN_INT (info->fp_save_offset
16662 + sp_offset
a4f6c312 16663 + 8 * i));
0be76840 16664 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16665
f676971a 16666 emit_move_insn (gen_rtx_REG (DFmode,
16667 info->first_fp_reg_save + i),
16668 mem);
16669 }
8d30c4ee 16670
9ebbca7d
GK
16671 /* If we saved cr, restore it here. Just those that were used. */
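     /* (Each SET built below moves one CR field out of r12; grouped in a
	PARALLEL they are expected to be emitted as a single mtcrf whose
	mask covers only the fields that were live.)  */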
16672 if (info->cr_save_p)
979721f8 16673 {
9ebbca7d 16674 rtx r12_rtx = gen_rtx_REG (SImode, 12);
e35b9579 16675 int count = 0;
f676971a 16676
d296e02e 16677 if (using_mtcr_multiple)
979721f8 16678 {
9ebbca7d 16679 for (i = 0; i < 8; i++)
6fb5fa3c 16680 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
e35b9579 16681 count++;
37409796 16682 gcc_assert (count);
e35b9579
GK
16683 }
16684
d296e02e 16685 if (using_mtcr_multiple && count > 1)
16686 {
16687 rtvec p;
16688 int ndx;
f676971a 16689
e35b9579 16690 p = rtvec_alloc (count);
9ebbca7d 16691
e35b9579 16692 ndx = 0;
9ebbca7d 16693 for (i = 0; i < 8; i++)
6fb5fa3c 16694 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
9ebbca7d
GK
16695 {
16696 rtvec r = rtvec_alloc (2);
16697 RTVEC_ELT (r, 0) = r12_rtx;
16698 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
e35b9579 16699 RTVEC_ELT (p, ndx) =
f676971a 16700 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
615158e2 16701 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
e35b9579 16702 ndx++;
9ebbca7d
GK
16703 }
16704 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
37409796 16705 gcc_assert (ndx == count);
979721f8
MM
16706 }
16707 else
9ebbca7d 16708 for (i = 0; i < 8; i++)
6fb5fa3c 16709 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
979721f8 16710 {
f676971a 16711 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
16712 CR0_REGNO+i),
16713 r12_rtx));
979721f8 16714 }
979721f8
MM
16715 }
16716
9ebbca7d 16717 /* If this is V.4, unwind the stack pointer after all of the loads
16718 have been done. */
16719 if (frame_reg_rtx != sp_reg_rtx)
16720 {
16721 /* This blockage is needed so that sched doesn't decide to move
16722 the sp change before the register restores. */
16723 rs6000_emit_stack_tie ();
45b194f8
AM
16724 if (sp_offset != 0)
16725 emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
16726 GEN_INT (sp_offset)));
16727 else
16728 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
022123e6
AM
16729 }
16730 else if (sp_offset != 0)
16731 emit_insn (TARGET_32BIT
16732 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16733 GEN_INT (sp_offset))
16734 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16735 GEN_INT (sp_offset)));
b6c9286a 16736
83720594
RH
16737 if (current_function_calls_eh_return)
16738 {
16739 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 16740 emit_insn (TARGET_32BIT
16741 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
16742 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
16743 }
16744
9ebbca7d
GK
16745 if (!sibcall)
16746 {
16747 rtvec p;
16748 if (! restoring_FPRs_inline)
16749 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
16750 else
16751 p = rtvec_alloc (2);
b6c9286a 16752
e35b9579 16753 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f676971a
EC
16754 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
16755 gen_rtx_REG (Pmode,
1de43f85 16756 LR_REGNO));
9ebbca7d
GK
16757
16758 /* If we have to restore more than two FP registers, branch to the
16759 restore function. It will return to our caller. */
16760 if (! restoring_FPRs_inline)
16761 {
16762 int i;
16763 char rname[30];
520a57c8 16764 const char *alloc_rname;
979721f8 16765
f676971a 16766 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9ebbca7d 16767 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
a8a05998 16768 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
16769 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
16770 gen_rtx_SYMBOL_REF (Pmode,
16771 alloc_rname));
b6c9286a 16772
9ebbca7d
GK
16773 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16774 {
16775 rtx addr, mem;
16776 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
16777 GEN_INT (info->fp_save_offset + 8*i));
0be76840 16778 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16779
f676971a 16780 RTVEC_ELT (p, i+3) =
16781 gen_rtx_SET (VOIDmode,
16782 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
16783 mem);
b6c9286a
MM
16784 }
16785 }
f676971a 16786
9ebbca7d 16787 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 16788 }
9878760c
RK
16789}
16790
16791/* Write function epilogue. */
16792
08c148a8 16793static void
f676971a 16794rs6000_output_function_epilogue (FILE *file,
a2369ed3 16795 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 16796{
9ebbca7d 16797 if (! HAVE_epilogue)
9878760c 16798 {
9ebbca7d
GK
16799 rtx insn = get_last_insn ();
16800 /* If the last insn was a BARRIER, we don't have to write anything except
16801 the trace table. */
16802 if (GET_CODE (insn) == NOTE)
16803 insn = prev_nonnote_insn (insn);
16804 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 16805 {
9ebbca7d
GK
16806 /* This is slightly ugly, but at least we don't have two
16807 copies of the epilogue-emitting code. */
16808 start_sequence ();
16809
16810 /* A NOTE_INSN_DELETED is supposed to be at the start
16811 and end of the "toplevel" insn chain. */
2e040219 16812 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16813 rs6000_emit_epilogue (FALSE);
2e040219 16814 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16815
a3c9585f 16816 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16817 {
16818 rtx insn;
16819 unsigned addr = 0;
16820 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16821 {
16822 INSN_ADDRESSES_NEW (insn, addr);
16823 addr += 4;
16824 }
16825 }
16826
9ebbca7d 16827 if (TARGET_DEBUG_STACK)
a4f6c312 16828 debug_rtx_list (get_insns (), 100);
c9d691e9 16829 final (get_insns (), file, FALSE);
9ebbca7d 16830 end_sequence ();
4697a36c 16831 }
9878760c 16832 }
b4ac57ab 16833
efdba735
SH
16834#if TARGET_MACHO
16835 macho_branch_islands ();
0e5da0be
GK
16836 /* Mach-O doesn't support labels at the end of objects, so if
16837 it looks like we might want one, insert a NOP. */
16838 {
16839 rtx insn = get_last_insn ();
16840 while (insn
16841 && NOTE_P (insn)
a38e7aa5 16842 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 16843 insn = PREV_INSN (insn);
f676971a
EC
16844 if (insn
16845 && (LABEL_P (insn)
0e5da0be 16846 || (NOTE_P (insn)
a38e7aa5 16847 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
16848 fputs ("\tnop\n", file);
16849 }
16850#endif
16851
9b30bae2 16852 /* Output a traceback table here. See /usr/include/sys/debug.h for info
16853 on its format.
16854
16855 We don't output a traceback table if -finhibit-size-directive was
16856 used. The documentation for -finhibit-size-directive reads
16857 ``don't output a @code{.size} assembler directive, or anything
16858 else that would cause trouble if the function is split in the
16859 middle, and the two halves are placed at locations far apart in
16860 memory.'' The traceback table has this property, since it
16861 includes the offset from the start of the function to the
16862 traceback table itself.
16863
16864 System V.4 Powerpc's (and the embedded ABI derived from it) use a
b6c9286a 16865 different traceback table. */
57ac7be9 16866 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
8097c268 16867 && rs6000_traceback != traceback_none && !current_function_is_thunk)
9b30bae2 16868 {
69c75916 16869 const char *fname = NULL;
3ac88239 16870 const char *language_string = lang_hooks.name;
6041bf2f 16871 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 16872 int i;
57ac7be9 16873 int optional_tbtab;
8097c268 16874 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
16875
16876 if (rs6000_traceback == traceback_full)
16877 optional_tbtab = 1;
16878 else if (rs6000_traceback == traceback_part)
16879 optional_tbtab = 0;
16880 else
16881 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 16882
69c75916
AM
16883 if (optional_tbtab)
16884 {
16885 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
16886 while (*fname == '.') /* V.4 encodes . in the name */
16887 fname++;
16888
16889 /* Need label immediately before tbtab, so we can compute
16890 its offset from the function start. */
16891 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
16892 ASM_OUTPUT_LABEL (file, fname);
16893 }
314fc5a9
ILT
16894
16895 /* The .tbtab pseudo-op can only be used for the first eight
16896 expressions, since it can't handle the possibly variable
16897 length fields that follow. However, if you omit the optional
16898 fields, the assembler outputs zeros for all optional fields
 16899 	 anyway, giving each variable length field its minimum length
 16900 	 (as defined in sys/debug.h).  Thus we cannot use the .tbtab
16901 pseudo-op at all. */
16902
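      /* (Illustrative only: for a plain optimized C function that saves
	 just the link register, allocates a frame, takes one integer
	 argument and uses no FPRs or frame pointer, the fixed part emitted
	 below would come out roughly as

		.long 0
		.byte 0,0,32,65,128,0,1,0

	 i.e. the all-zero flag word, then format type, language, the two
	 flag bytes, the backchain/FPR byte, the GPR count, the fixed
	 parameter words and the FP-parameter byte.)  */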
16903 /* An all-zero word flags the start of the tbtab, for debuggers
16904 that have to find it by searching forward from the entry
16905 point or from the current pc. */
19d2d16f 16906 fputs ("\t.long 0\n", file);
314fc5a9
ILT
16907
16908 /* Tbtab format type. Use format type 0. */
19d2d16f 16909 fputs ("\t.byte 0,", file);
314fc5a9 16910
5fc921c1
DE
16911 /* Language type. Unfortunately, there does not seem to be any
16912 official way to discover the language being compiled, so we
16913 use language_string.
16914 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
16915 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
16916 a number, so for now use 9. */
5fc921c1 16917 if (! strcmp (language_string, "GNU C"))
314fc5a9 16918 i = 0;
6de9cd9a 16919 else if (! strcmp (language_string, "GNU F77")
7f62878c 16920 || ! strcmp (language_string, "GNU Fortran"))
314fc5a9 16921 i = 1;
8b83775b 16922 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 16923 i = 2;
5fc921c1
DE
16924 else if (! strcmp (language_string, "GNU Ada"))
16925 i = 3;
56438901
AM
16926 else if (! strcmp (language_string, "GNU C++")
16927 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 16928 i = 9;
9517ead8
AG
16929 else if (! strcmp (language_string, "GNU Java"))
16930 i = 13;
5fc921c1
DE
16931 else if (! strcmp (language_string, "GNU Objective-C"))
16932 i = 14;
314fc5a9 16933 else
37409796 16934 gcc_unreachable ();
314fc5a9
ILT
16935 fprintf (file, "%d,", i);
16936
16937 /* 8 single bit fields: global linkage (not set for C extern linkage,
16938 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
16939 from start of procedure stored in tbtab, internal function, function
16940 has controlled storage, function has no toc, function uses fp,
16941 function logs/aborts fp operations. */
16942 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
16943 fprintf (file, "%d,",
16944 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
16945
16946 /* 6 bitfields: function is interrupt handler, name present in
16947 proc table, function calls alloca, on condition directives
16948 (controls stack walks, 3 bits), saves condition reg, saves
16949 link reg. */
16950 /* The `function calls alloca' bit seems to be set whenever reg 31 is
16951 set up as a frame pointer, even when there is no alloca call. */
16952 fprintf (file, "%d,",
16953 ((optional_tbtab << 6)
16954 | ((optional_tbtab & frame_pointer_needed) << 5)
16955 | (info->cr_save_p << 1)
16956 | (info->lr_save_p)));
314fc5a9 16957
6041bf2f 16958 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
16959 (6 bits). */
16960 fprintf (file, "%d,",
4697a36c 16961 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
16962
16963 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
16964 fprintf (file, "%d,", (32 - first_reg_to_save ()));
16965
6041bf2f
DE
16966 if (optional_tbtab)
16967 {
16968 /* Compute the parameter info from the function decl argument
16969 list. */
16970 tree decl;
16971 int next_parm_info_bit = 31;
314fc5a9 16972
6041bf2f
DE
16973 for (decl = DECL_ARGUMENTS (current_function_decl);
16974 decl; decl = TREE_CHAIN (decl))
16975 {
16976 rtx parameter = DECL_INCOMING_RTL (decl);
16977 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 16978
6041bf2f
DE
16979 if (GET_CODE (parameter) == REG)
16980 {
ebb109ad 16981 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
16982 {
16983 int bits;
16984
16985 float_parms++;
16986
37409796
NS
16987 switch (mode)
16988 {
16989 case SFmode:
e41b2a33 16990 case SDmode:
37409796
NS
16991 bits = 0x2;
16992 break;
16993
16994 case DFmode:
7393f7f8 16995 case DDmode:
37409796 16996 case TFmode:
7393f7f8 16997 case TDmode:
37409796
NS
16998 bits = 0x3;
16999 break;
17000
17001 default:
17002 gcc_unreachable ();
17003 }
6041bf2f
DE
17004
17005 /* If only one bit will fit, don't or in this entry. */
17006 if (next_parm_info_bit > 0)
17007 parm_info |= (bits << (next_parm_info_bit - 1));
17008 next_parm_info_bit -= 2;
17009 }
17010 else
17011 {
17012 fixed_parms += ((GET_MODE_SIZE (mode)
17013 + (UNITS_PER_WORD - 1))
17014 / UNITS_PER_WORD);
17015 next_parm_info_bit -= 1;
17016 }
17017 }
17018 }
17019 }
314fc5a9
ILT
17020
17021 /* Number of fixed point parameters. */
17022 /* This is actually the number of words of fixed point parameters; thus
17023 an 8 byte struct counts as 2; and thus the maximum value is 8. */
17024 fprintf (file, "%d,", fixed_parms);
17025
17026 /* 2 bitfields: number of floating point parameters (7 bits), parameters
17027 all on stack. */
17028 /* This is actually the number of fp registers that hold parameters;
17029 and thus the maximum value is 13. */
17030 /* Set parameters on stack bit if parameters are not in their original
17031 registers, regardless of whether they are on the stack? Xlc
17032 seems to set the bit when not optimizing. */
17033 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
17034
6041bf2f
DE
17035 if (! optional_tbtab)
17036 return;
17037
314fc5a9
ILT
17038 /* Optional fields follow. Some are variable length. */
17039
17040 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
17041 11 double float. */
17042 /* There is an entry for each parameter in a register, in the order that
17043 they occur in the parameter list. Any intervening arguments on the
17044 stack are ignored. If the list overflows a long (max possible length
17045 34 bits) then completely leave off all elements that don't fit. */
17046 /* Only emit this long if there was at least one parameter. */
17047 if (fixed_parms || float_parms)
17048 fprintf (file, "\t.long %d\n", parm_info);
17049
17050 /* Offset from start of code to tb table. */
19d2d16f 17051 fputs ("\t.long ", file);
314fc5a9 17052 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
17053 if (TARGET_AIX)
17054 RS6000_OUTPUT_BASENAME (file, fname);
17055 else
17056 assemble_name (file, fname);
17057 putc ('-', file);
17058 rs6000_output_function_entry (file, fname);
19d2d16f 17059 putc ('\n', file);
314fc5a9
ILT
17060
17061 /* Interrupt handler mask. */
17062 /* Omit this long, since we never set the interrupt handler bit
17063 above. */
17064
17065 /* Number of CTL (controlled storage) anchors. */
17066 /* Omit this long, since the has_ctl bit is never set above. */
17067
17068 /* Displacement into stack of each CTL anchor. */
17069 /* Omit this list of longs, because there are no CTL anchors. */
17070
17071 /* Length of function name. */
69c75916
AM
17072 if (*fname == '*')
17073 ++fname;
296b8152 17074 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
17075
17076 /* Function name. */
17077 assemble_string (fname, strlen (fname));
17078
17079 /* Register for alloca automatic storage; this is always reg 31.
17080 Only emit this if the alloca bit was set above. */
17081 if (frame_pointer_needed)
19d2d16f 17082 fputs ("\t.byte 31\n", file);
b1765bde
DE
17083
17084 fputs ("\t.align 2\n", file);
9b30bae2 17085 }
9878760c 17086}
17167fd8 17087\f
a4f6c312
SS
17088/* A C compound statement that outputs the assembler code for a thunk
17089 function, used to implement C++ virtual function calls with
17090 multiple inheritance. The thunk acts as a wrapper around a virtual
17091 function, adjusting the implicit object parameter before handing
17092 control off to the real function.
17093
17094 First, emit code to add the integer DELTA to the location that
17095 contains the incoming first argument. Assume that this argument
17096 contains a pointer, and is the one used to pass the `this' pointer
17097 in C++. This is the incoming argument *before* the function
17098 prologue, e.g. `%o0' on a sparc. The addition must preserve the
17099 values of all other incoming arguments.
17167fd8
MM
17100
17101 After the addition, emit code to jump to FUNCTION, which is a
17102 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
17103 not touch the return address. Hence returning from FUNCTION will
17104 return to whoever called the current `thunk'.
17167fd8 17105
a4f6c312
SS
17106 The effect must be as if FUNCTION had been called directly with the
17107 adjusted first argument. This macro is responsible for emitting
17108 all of the code for a thunk function; output_function_prologue()
17109 and output_function_epilogue() are not invoked.
17167fd8 17110
a4f6c312
SS
17111 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
17112 been extracted from it.) It might possibly be useful on some
17113 targets, but probably not.
17167fd8 17114
a4f6c312
SS
17115 If you do not define this macro, the target-independent code in the
17116 C++ frontend will generate a less efficient heavyweight thunk that
17117 calls FUNCTION instead of jumping to it. The generic approach does
17118 not support varargs. */
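/* (Illustrative sketch: for a DELTA-only thunk on a 32-bit target the code
   generated below amounts to

	addi r3,r3,DELTA
	b    function

   with r4 adjusted instead of r3 when FUNCTION returns an aggregate in
   memory, and an extra load through r12 when VCALL_OFFSET is nonzero.)  */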
17167fd8 17119
3961e8fe 17120static void
17121rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
17122 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 17123 tree function)
17167fd8 17124{
5b71a4e7 17125 rtx this, insn, funexp;
17167fd8 17126
5b71a4e7 17127 reload_completed = 1;
fe3ad572 17128 epilogue_completed = 1;
56a7189a 17129
5b71a4e7 17130 /* Mark the end of the (empty) prologue. */
2e040219 17131 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 17132
5b71a4e7
DE
17133 /* Find the "this" pointer. If the function returns a structure,
17134 the structure return pointer is in r3. */
61f71b34 17135 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 17136 this = gen_rtx_REG (Pmode, 4);
56a7189a 17137 else
5b71a4e7 17138 this = gen_rtx_REG (Pmode, 3);
17167fd8 17139
5b71a4e7
DE
17140 /* Apply the constant offset, if required. */
17141 if (delta)
17142 {
17143 rtx delta_rtx = GEN_INT (delta);
17144 emit_insn (TARGET_32BIT
17145 ? gen_addsi3 (this, this, delta_rtx)
17146 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
17147 }
17148
5b71a4e7
DE
17149 /* Apply the offset from the vtable, if required. */
17150 if (vcall_offset)
17167fd8 17151 {
5b71a4e7
DE
17152 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
17153 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 17154
5b71a4e7 17155 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
17156 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
17157 {
17158 emit_insn (TARGET_32BIT
17159 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
17160 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
17161 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
17162 }
17163 else
17164 {
17165 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
17166
17167 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
17168 }
5b71a4e7
DE
17169 emit_insn (TARGET_32BIT
17170 ? gen_addsi3 (this, this, tmp)
17171 : gen_adddi3 (this, this, tmp));
17167fd8
MM
17172 }
17173
5b71a4e7
DE
17174 /* Generate a tail call to the target function. */
17175 if (!TREE_USED (function))
17176 {
17177 assemble_external (function);
17178 TREE_USED (function) = 1;
17179 }
17180 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 17181 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
17182
17183#if TARGET_MACHO
ab82a49f 17184 if (MACHOPIC_INDIRECT)
5b71a4e7 17185 funexp = machopic_indirect_call_target (funexp);
ee890fe2 17186#endif
5b71a4e7
DE
17187
17188 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 17189 generate sibcall RTL explicitly. */
5b71a4e7
DE
17190 insn = emit_call_insn (
17191 gen_rtx_PARALLEL (VOIDmode,
17192 gen_rtvec (4,
17193 gen_rtx_CALL (VOIDmode,
17194 funexp, const0_rtx),
17195 gen_rtx_USE (VOIDmode, const0_rtx),
17196 gen_rtx_USE (VOIDmode,
17197 gen_rtx_REG (SImode,
1de43f85 17198 LR_REGNO)),
17199 gen_rtx_RETURN (VOIDmode))));
17200 SIBLING_CALL_P (insn) = 1;
17201 emit_barrier ();
17202
17203 /* Run just enough of rest_of_compilation to get the insns emitted.
17204 There's not really enough bulk here to make other passes such as
17205 instruction scheduling worth while. Note that use_thunk calls
17206 assemble_start_function and assemble_end_function. */
17207 insn = get_insns ();
55e092c4 17208 insn_locators_alloc ();
5b71a4e7
DE
17209 shorten_branches (insn);
17210 final_start_function (insn, file, 1);
c9d691e9 17211 final (insn, file, 1);
5b71a4e7 17212 final_end_function ();
d7087dd2 17213 free_after_compilation (cfun);
5b71a4e7
DE
17214
17215 reload_completed = 0;
fe3ad572 17216 epilogue_completed = 0;
9ebbca7d 17217}
9ebbca7d
GK
17218\f
 17219 /* A quick summary of the various types of 'constant-pool tables'
 17220    under PowerPC:
 17221
 17222    Target	Flags		Name		One table per
 17223    AIX		(none)		AIX TOC		object file
 17224    AIX		-mfull-toc	AIX TOC		object file
 17225    AIX		-mminimal-toc	AIX minimal TOC	translation unit
 17226    SVR4/EABI	(none)		SVR4 SDATA	object file
 17227    SVR4/EABI	-fpic		SVR4 pic	object file
 17228    SVR4/EABI	-fPIC		SVR4 PIC	translation unit
 17229    SVR4/EABI	-mrelocatable	EABI TOC	function
 17230    SVR4/EABI	-maix		AIX TOC		object file
 17231    SVR4/EABI	-maix -mminimal-toc
 17232 				AIX minimal TOC	translation unit
 17233
 17234    Name			Reg.	Set by	entries	contains:
 17235 					made by	addrs?	fp?	sum?
 17236
 17237    AIX TOC		2	crt0	as	Y	option	option
 17238    AIX minimal TOC	30	prolog	gcc	Y	Y	option
 17239    SVR4 SDATA		13	crt0	gcc	N	Y	N
 17240    SVR4 pic		30	prolog	ld	Y	not yet	N
 17241    SVR4 PIC		30	prolog	gcc	Y	option	option
 17242    EABI TOC		30	prolog	gcc	Y	option	option
 17243
 17244 */
17245
9ebbca7d
GK
17246/* Hash functions for the hash table. */
17247
17248static unsigned
a2369ed3 17249rs6000_hash_constant (rtx k)
9ebbca7d 17250{
46b33600
RH
17251 enum rtx_code code = GET_CODE (k);
17252 enum machine_mode mode = GET_MODE (k);
17253 unsigned result = (code << 3) ^ mode;
17254 const char *format;
17255 int flen, fidx;
f676971a 17256
46b33600
RH
17257 format = GET_RTX_FORMAT (code);
17258 flen = strlen (format);
17259 fidx = 0;
9ebbca7d 17260
46b33600
RH
17261 switch (code)
17262 {
17263 case LABEL_REF:
17264 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
17265
17266 case CONST_DOUBLE:
17267 if (mode != VOIDmode)
17268 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
17269 flen = 2;
17270 break;
17271
17272 case CODE_LABEL:
17273 fidx = 3;
17274 break;
17275
17276 default:
17277 break;
17278 }
9ebbca7d
GK
17279
17280 for (; fidx < flen; fidx++)
17281 switch (format[fidx])
17282 {
17283 case 's':
17284 {
17285 unsigned i, len;
17286 const char *str = XSTR (k, fidx);
17287 len = strlen (str);
17288 result = result * 613 + len;
17289 for (i = 0; i < len; i++)
17290 result = result * 613 + (unsigned) str[i];
17167fd8
MM
17291 break;
17292 }
9ebbca7d
GK
17293 case 'u':
17294 case 'e':
17295 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
17296 break;
17297 case 'i':
17298 case 'n':
17299 result = result * 613 + (unsigned) XINT (k, fidx);
17300 break;
17301 case 'w':
17302 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
17303 result = result * 613 + (unsigned) XWINT (k, fidx);
17304 else
17305 {
17306 size_t i;
9390387d 17307 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
17308 result = result * 613 + (unsigned) (XWINT (k, fidx)
17309 >> CHAR_BIT * i);
17310 }
17311 break;
09501938
DE
17312 case '0':
17313 break;
9ebbca7d 17314 default:
37409796 17315 gcc_unreachable ();
9ebbca7d 17316 }
46b33600 17317
9ebbca7d
GK
17318 return result;
17319}
17320
17321static unsigned
a2369ed3 17322toc_hash_function (const void *hash_entry)
9ebbca7d 17323{
f676971a 17324 const struct toc_hash_struct *thc =
17325 (const struct toc_hash_struct *) hash_entry;
17326 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
17327}
17328
17329/* Compare H1 and H2 for equivalence. */
17330
17331static int
a2369ed3 17332toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
17333{
17334 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
17335 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
17336
a9098fd0
GK
17337 if (((const struct toc_hash_struct *) h1)->key_mode
17338 != ((const struct toc_hash_struct *) h2)->key_mode)
17339 return 0;
17340
5692c7bc 17341 return rtx_equal_p (r1, r2);
9ebbca7d
GK
17342}
17343
28e510bd
MM
17344/* These are the names given by the C++ front-end to vtables, and
17345 vtable-like objects. Ideally, this logic should not be here;
17346 instead, there should be some programmatic way of inquiring as
17347 to whether or not an object is a vtable. */
17348
17349#define VTABLE_NAME_P(NAME) \
9390387d 17350 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
17351 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
17352 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 17353 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 17354 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
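/* (_ZTV, _ZTT, _ZTI and _ZTC are the Itanium C++ ABI manglings for vtables,
   VTTs, typeinfo objects and construction vtables respectively; "_vt." is
   the old pre-3.0 g++ scheme.)  */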
28e510bd
MM
17355
17356void
a2369ed3 17357rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
17358{
17359 /* Currently C++ toc references to vtables can be emitted before it
17360 is decided whether the vtable is public or private. If this is
17361 the case, then the linker will eventually complain that there is
f676971a 17362 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
17363 we emit the TOC reference to reference the symbol and not the
17364 section. */
17365 const char *name = XSTR (x, 0);
54ee9799 17366
f676971a 17367 if (VTABLE_NAME_P (name))
54ee9799
DE
17368 {
17369 RS6000_OUTPUT_BASENAME (file, name);
17370 }
17371 else
17372 assemble_name (file, name);
28e510bd
MM
17373}
17374
a4f6c312
SS
17375/* Output a TOC entry. We derive the entry name from what is being
17376 written. */
9878760c
RK
17377
17378void
a2369ed3 17379output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
17380{
17381 char buf[256];
3cce094d 17382 const char *name = buf;
ec940faa 17383 const char *real_name;
9878760c 17384 rtx base = x;
16fdeb48 17385 HOST_WIDE_INT offset = 0;
9878760c 17386
37409796 17387 gcc_assert (!TARGET_NO_TOC);
4697a36c 17388
9ebbca7d
GK
17389 /* When the linker won't eliminate them, don't output duplicate
17390 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
17391 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
17392 CODE_LABELs. */
17393 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
17394 {
17395 struct toc_hash_struct *h;
17396 void * * found;
f676971a 17397
17211ab5 17398 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 17399 time because GGC is not initialized at that point. */
17211ab5 17400 if (toc_hash_table == NULL)
f676971a 17401 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17402 toc_hash_eq, NULL);
17403
9ebbca7d
GK
17404 h = ggc_alloc (sizeof (*h));
17405 h->key = x;
a9098fd0 17406 h->key_mode = mode;
9ebbca7d 17407 h->labelno = labelno;
f676971a 17408
9ebbca7d
GK
17409 found = htab_find_slot (toc_hash_table, h, 1);
17410 if (*found == NULL)
17411 *found = h;
f676971a 17412 else /* This is indeed a duplicate.
17413 Set this label equal to that label. */
17414 {
17415 fputs ("\t.set ", file);
17416 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
17417 fprintf (file, "%d,", labelno);
17418 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 17419 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
17420 found)->labelno));
17421 return;
17422 }
17423 }
17424
17425 /* If we're going to put a double constant in the TOC, make sure it's
17426 aligned properly when strict alignment is on. */
ff1720ed
RK
17427 if (GET_CODE (x) == CONST_DOUBLE
17428 && STRICT_ALIGNMENT
a9098fd0 17429 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
17430 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
17431 ASM_OUTPUT_ALIGN (file, 3);
17432 }
17433
4977bab6 17434 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 17435
37c37a57
RK
17436 /* Handle FP constants specially. Note that if we have a minimal
17437 TOC, things we put here aren't actually in the TOC, so we can allow
17438 FP constants. */
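  /* (Illustrative: with a full TOC on a 64-bit target, a DFmode 1.0
     constant is emitted by the code below as

	.tc FD_3ff00000_0[TC],0x3ff0000000000000

     -- the hex words of the value are folded into both the entry name and
     its contents.)  */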
00b79d54
BE
17439 if (GET_CODE (x) == CONST_DOUBLE &&
17440 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
17441 {
17442 REAL_VALUE_TYPE rv;
17443 long k[4];
17444
17445 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17446 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17447 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
17448 else
17449 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
17450
17451 if (TARGET_64BIT)
17452 {
17453 if (TARGET_MINIMAL_TOC)
17454 fputs (DOUBLE_INT_ASM_OP, file);
17455 else
17456 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17457 k[0] & 0xffffffff, k[1] & 0xffffffff,
17458 k[2] & 0xffffffff, k[3] & 0xffffffff);
17459 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
17460 k[0] & 0xffffffff, k[1] & 0xffffffff,
17461 k[2] & 0xffffffff, k[3] & 0xffffffff);
17462 return;
17463 }
17464 else
17465 {
17466 if (TARGET_MINIMAL_TOC)
17467 fputs ("\t.long ", file);
17468 else
17469 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17470 k[0] & 0xffffffff, k[1] & 0xffffffff,
17471 k[2] & 0xffffffff, k[3] & 0xffffffff);
17472 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
17473 k[0] & 0xffffffff, k[1] & 0xffffffff,
17474 k[2] & 0xffffffff, k[3] & 0xffffffff);
17475 return;
17476 }
17477 }
00b79d54
BE
17478 else if (GET_CODE (x) == CONST_DOUBLE &&
17479 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 17480 {
042259f2
DE
17481 REAL_VALUE_TYPE rv;
17482 long k[2];
0adc764e 17483
042259f2 17484 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17485
17486 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17487 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
17488 else
17489 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 17490
13ded975
DE
17491 if (TARGET_64BIT)
17492 {
17493 if (TARGET_MINIMAL_TOC)
2bfcf297 17494 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17495 else
2f0552b6
AM
17496 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17497 k[0] & 0xffffffff, k[1] & 0xffffffff);
17498 fprintf (file, "0x%lx%08lx\n",
17499 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17500 return;
17501 }
1875cc88 17502 else
13ded975
DE
17503 {
17504 if (TARGET_MINIMAL_TOC)
2bfcf297 17505 fputs ("\t.long ", file);
13ded975 17506 else
2f0552b6
AM
17507 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17508 k[0] & 0xffffffff, k[1] & 0xffffffff);
17509 fprintf (file, "0x%lx,0x%lx\n",
17510 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17511 return;
17512 }
9878760c 17513 }
00b79d54
BE
17514 else if (GET_CODE (x) == CONST_DOUBLE &&
17515 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 17516 {
042259f2
DE
17517 REAL_VALUE_TYPE rv;
17518 long l;
9878760c 17519
042259f2 17520 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17521 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17522 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
17523 else
17524 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 17525
31bfaa0b
DE
17526 if (TARGET_64BIT)
17527 {
17528 if (TARGET_MINIMAL_TOC)
2bfcf297 17529 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 17530 else
2f0552b6
AM
17531 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17532 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
17533 return;
17534 }
042259f2 17535 else
31bfaa0b
DE
17536 {
17537 if (TARGET_MINIMAL_TOC)
2bfcf297 17538 fputs ("\t.long ", file);
31bfaa0b 17539 else
2f0552b6
AM
17540 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17541 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
17542 return;
17543 }
042259f2 17544 }
f176e826 17545 else if (GET_MODE (x) == VOIDmode
a9098fd0 17546 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 17547 {
e2c953b6 17548 unsigned HOST_WIDE_INT low;
042259f2
DE
17549 HOST_WIDE_INT high;
17550
17551 if (GET_CODE (x) == CONST_DOUBLE)
17552 {
17553 low = CONST_DOUBLE_LOW (x);
17554 high = CONST_DOUBLE_HIGH (x);
17555 }
17556 else
17557#if HOST_BITS_PER_WIDE_INT == 32
17558 {
17559 low = INTVAL (x);
0858c623 17560 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
17561 }
17562#else
17563 {
c4ad648e
AM
17564 low = INTVAL (x) & 0xffffffff;
17565 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
17566 }
17567#endif
9878760c 17568
a9098fd0
GK
17569 /* TOC entries are always Pmode-sized, but since this
17570 is a bigendian machine then if we're putting smaller
17571 integer constants in the TOC we have to pad them.
17572 (This is still a win over putting the constants in
17573 a separate constant pool, because then we'd have
17574 to have both a TOC entry _and_ the actual constant.)
17575
17576 For a 32-bit target, CONST_INT values are loaded and shifted
17577 entirely within `low' and can be stored in one TOC entry. */
17578
37409796
NS
17579 /* It would be easy to make this work, but it doesn't now. */
17580 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
17581
17582 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
17583 {
17584#if HOST_BITS_PER_WIDE_INT == 32
17585 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
17586 POINTER_SIZE, &low, &high, 0);
17587#else
17588 low |= high << 32;
17589 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
17590 high = (HOST_WIDE_INT) low >> 32;
17591 low &= 0xffffffff;
17592#endif
17593 }
a9098fd0 17594
13ded975
DE
17595 if (TARGET_64BIT)
17596 {
17597 if (TARGET_MINIMAL_TOC)
2bfcf297 17598 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17599 else
2f0552b6
AM
17600 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
17601 (long) high & 0xffffffff, (long) low & 0xffffffff);
17602 fprintf (file, "0x%lx%08lx\n",
17603 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
17604 return;
17605 }
1875cc88 17606 else
13ded975 17607 {
02a4ec28
FS
17608 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
17609 {
17610 if (TARGET_MINIMAL_TOC)
2bfcf297 17611 fputs ("\t.long ", file);
02a4ec28 17612 else
2bfcf297 17613 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
17614 (long) high & 0xffffffff, (long) low & 0xffffffff);
17615 fprintf (file, "0x%lx,0x%lx\n",
17616 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 17617 }
13ded975 17618 else
02a4ec28
FS
17619 {
17620 if (TARGET_MINIMAL_TOC)
2bfcf297 17621 fputs ("\t.long ", file);
02a4ec28 17622 else
2f0552b6
AM
17623 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
17624 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 17625 }
13ded975
DE
17626 return;
17627 }
9878760c
RK
17628 }
17629
17630 if (GET_CODE (x) == CONST)
17631 {
37409796 17632 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 17633
9878760c
RK
17634 base = XEXP (XEXP (x, 0), 0);
17635 offset = INTVAL (XEXP (XEXP (x, 0), 1));
17636 }
f676971a 17637
37409796
NS
17638 switch (GET_CODE (base))
17639 {
17640 case SYMBOL_REF:
17641 name = XSTR (base, 0);
17642 break;
17643
17644 case LABEL_REF:
17645 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
17646 CODE_LABEL_NUMBER (XEXP (base, 0)));
17647 break;
17648
17649 case CODE_LABEL:
17650 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
17651 break;
17652
17653 default:
17654 gcc_unreachable ();
17655 }
9878760c 17656
772c5265 17657 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 17658 if (TARGET_MINIMAL_TOC)
2bfcf297 17659 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
17660 else
17661 {
b6c9286a 17662 fprintf (file, "\t.tc %s", real_name);
9878760c 17663
1875cc88 17664 if (offset < 0)
16fdeb48 17665 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 17666 else if (offset)
16fdeb48 17667 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 17668
19d2d16f 17669 fputs ("[TC],", file);
1875cc88 17670 }
581bc4de
MM
17671
17672 /* Currently C++ toc references to vtables can be emitted before it
17673 is decided whether the vtable is public or private. If this is
17674 the case, then the linker will eventually complain that there is
17675 a TOC reference to an unknown section. Thus, for vtables only,
17676 we emit the TOC reference to reference the symbol and not the
17677 section. */
28e510bd 17678 if (VTABLE_NAME_P (name))
581bc4de 17679 {
54ee9799 17680 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 17681 if (offset < 0)
16fdeb48 17682 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 17683 else if (offset > 0)
16fdeb48 17684 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
17685 }
17686 else
17687 output_addr_const (file, x);
19d2d16f 17688 putc ('\n', file);
9878760c
RK
17689}
17690\f
17691/* Output an assembler pseudo-op to write an ASCII string of N characters
17692 starting at P to FILE.
17693
17694 On the RS/6000, we have to do this using the .byte operation and
17695 write out special characters outside the quoted string.
17696 Also, the assembler is broken; very long strings are truncated,
a4f6c312 17697 so we must artificially break them up early. */
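/* (Illustrative: a 3-byte input of 'H', 'i', newline is emitted as

	.byte "Hi"
	.byte 10

   -- printable runs go out as quoted strings, everything else as decimal
   byte values, and the quoted string is closed every 512 characters.)  */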
9878760c
RK
17698
17699void
a2369ed3 17700output_ascii (FILE *file, const char *p, int n)
9878760c
RK
17701{
17702 char c;
17703 int i, count_string;
d330fd93
KG
17704 const char *for_string = "\t.byte \"";
17705 const char *for_decimal = "\t.byte ";
17706 const char *to_close = NULL;
9878760c
RK
17707
17708 count_string = 0;
17709 for (i = 0; i < n; i++)
17710 {
17711 c = *p++;
17712 if (c >= ' ' && c < 0177)
17713 {
17714 if (for_string)
17715 fputs (for_string, file);
17716 putc (c, file);
17717
17718 /* Write two quotes to get one. */
17719 if (c == '"')
17720 {
17721 putc (c, file);
17722 ++count_string;
17723 }
17724
17725 for_string = NULL;
17726 for_decimal = "\"\n\t.byte ";
17727 to_close = "\"\n";
17728 ++count_string;
17729
17730 if (count_string >= 512)
17731 {
17732 fputs (to_close, file);
17733
17734 for_string = "\t.byte \"";
17735 for_decimal = "\t.byte ";
17736 to_close = NULL;
17737 count_string = 0;
17738 }
17739 }
17740 else
17741 {
17742 if (for_decimal)
17743 fputs (for_decimal, file);
17744 fprintf (file, "%d", c);
17745
17746 for_string = "\n\t.byte \"";
17747 for_decimal = ", ";
17748 to_close = "\n";
17749 count_string = 0;
17750 }
17751 }
17752
17753 /* Now close the string if we have written one. Then end the line. */
17754 if (to_close)
9ebbca7d 17755 fputs (to_close, file);
9878760c
RK
17756}
17757\f
17758/* Generate a unique section name for FILENAME for a section type
17759 represented by SECTION_DESC. Output goes into BUF.
17760
17761 SECTION_DESC can be any string, as long as it is different for each
17762 possible section type.
17763
17764 We name the section in the same manner as xlc. The name begins with an
17765 underscore followed by the filename (after stripping any leading directory
17766 names) with the last period replaced by the string SECTION_DESC. If
17767 FILENAME does not contain a period, SECTION_DESC is appended to the end of
17768 the name. */
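/* (Illustrative: FILENAME "src/foo.c" with SECTION_DESC "data_" yields
   "_foodata_" -- the directory part and any non-alphanumeric characters
   are dropped, and the final ".c" is replaced by the descriptor.)  */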
9878760c
RK
17769
17770void
f676971a 17771rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 17772 const char *section_desc)
9878760c 17773{
9ebbca7d 17774 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
17775 char *p;
17776 int len;
9878760c
RK
17777
17778 after_last_slash = filename;
17779 for (q = filename; *q; q++)
11e5fe42
RK
17780 {
17781 if (*q == '/')
17782 after_last_slash = q + 1;
17783 else if (*q == '.')
17784 last_period = q;
17785 }
9878760c 17786
11e5fe42 17787 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 17788 *buf = (char *) xmalloc (len);
9878760c
RK
17789
17790 p = *buf;
17791 *p++ = '_';
17792
17793 for (q = after_last_slash; *q; q++)
17794 {
11e5fe42 17795 if (q == last_period)
c4ad648e 17796 {
9878760c
RK
17797 strcpy (p, section_desc);
17798 p += strlen (section_desc);
e3981aab 17799 break;
c4ad648e 17800 }
9878760c 17801
e9a780ec 17802 else if (ISALNUM (*q))
c4ad648e 17803 *p++ = *q;
9878760c
RK
17804 }
17805
11e5fe42 17806 if (last_period == 0)
9878760c
RK
17807 strcpy (p, section_desc);
17808 else
17809 *p = '\0';
17810}
e165f3f0 17811\f
a4f6c312 17812/* Emit profile function. */
411707f4 17813
411707f4 17814void
a2369ed3 17815output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 17816{
858081ad
AH
17817 /* Non-standard profiling for kernels, which just saves LR then calls
17818 _mcount without worrying about arg saves. The idea is to change
17819 the function prologue as little as possible as it isn't easy to
17820 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
17821 if (TARGET_PROFILE_KERNEL)
17822 return;
17823
8480e480
CC
17824 if (DEFAULT_ABI == ABI_AIX)
17825 {
9739c90c
JJ
17826#ifndef NO_PROFILE_COUNTERS
17827# define NO_PROFILE_COUNTERS 0
17828#endif
f676971a 17829 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
17830 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
17831 else
17832 {
17833 char buf[30];
17834 const char *label_name;
17835 rtx fun;
411707f4 17836
9739c90c
JJ
17837 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
17838 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
17839 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 17840
9739c90c
JJ
17841 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
17842 fun, Pmode);
17843 }
8480e480 17844 }
ee890fe2
SS
17845 else if (DEFAULT_ABI == ABI_DARWIN)
17846 {
d5fa86ba 17847 const char *mcount_name = RS6000_MCOUNT;
1de43f85 17848 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
17849
17850 /* Be conservative and always set this, at least for now. */
17851 current_function_uses_pic_offset_table = 1;
17852
17853#if TARGET_MACHO
17854 /* For PIC code, set up a stub and collect the caller's address
17855 from r0, which is where the prologue puts it. */
11abc112
MM
17856 if (MACHOPIC_INDIRECT
17857 && current_function_uses_pic_offset_table)
17858 caller_addr_regno = 0;
ee890fe2
SS
17859#endif
17860 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
17861 0, VOIDmode, 1,
17862 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
17863 }
411707f4
CC
17864}
17865
a4f6c312 17866/* Write function profiler code. */
e165f3f0
RK
17867
17868void
a2369ed3 17869output_function_profiler (FILE *file, int labelno)
e165f3f0 17870{
3daf36a4 17871 char buf[100];
e165f3f0 17872
38c1f2d7 17873 switch (DEFAULT_ABI)
3daf36a4 17874 {
38c1f2d7 17875 default:
37409796 17876 gcc_unreachable ();
38c1f2d7
MM
17877
17878 case ABI_V4:
09eeeacb
AM
17879 if (!TARGET_32BIT)
17880 {
d4ee4d25 17881 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
17882 return;
17883 }
ffcfcb5f 17884 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 17885 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
17886 if (NO_PROFILE_COUNTERS)
17887 {
17888 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17889 reg_names[0], reg_names[1]);
17890 }
17891 else if (TARGET_SECURE_PLT && flag_pic)
17892 {
17893 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
17894 reg_names[0], reg_names[1]);
17895 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
17896 asm_fprintf (file, "\t{cau|addis} %s,%s,",
17897 reg_names[12], reg_names[12]);
17898 assemble_name (file, buf);
17899 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
17900 assemble_name (file, buf);
17901 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
17902 }
17903 else if (flag_pic == 1)
38c1f2d7 17904 {
dfdfa60f 17905 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
17906 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17907 reg_names[0], reg_names[1]);
17167fd8 17908 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 17909 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 17910 assemble_name (file, buf);
17167fd8 17911 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 17912 }
9ebbca7d 17913 else if (flag_pic > 1)
38c1f2d7 17914 {
71625f3d
AM
17915 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17916 reg_names[0], reg_names[1]);
9ebbca7d 17917 /* Now, we need to get the address of the label. */
71625f3d 17918 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 17919 assemble_name (file, buf);
9ebbca7d
GK
17920 fputs ("-.\n1:", file);
17921 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 17922 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
17923 reg_names[0], reg_names[11]);
17924 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
17925 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 17926 }
38c1f2d7
MM
17927 else
17928 {
17167fd8 17929 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 17930 assemble_name (file, buf);
dfdfa60f 17931 fputs ("@ha\n", file);
71625f3d
AM
17932 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17933 reg_names[0], reg_names[1]);
a260abc9 17934 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 17935 assemble_name (file, buf);
17167fd8 17936 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
17937 }
17938
50d440bc 17939 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
17940 fprintf (file, "\tbl %s%s\n",
17941 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
17942 break;
17943
17944 case ABI_AIX:
ee890fe2 17945 case ABI_DARWIN:
ffcfcb5f
AM
17946 if (!TARGET_PROFILE_KERNEL)
17947 {
a3c9585f 17948 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
17949 }
17950 else
17951 {
37409796 17952 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
17953
17954 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
17955 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
17956
6de9cd9a 17957 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
17958 {
17959 asm_fprintf (file, "\tstd %s,24(%s)\n",
17960 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17961 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17962 asm_fprintf (file, "\tld %s,24(%s)\n",
17963 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17964 }
17965 else
17966 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17967 }
38c1f2d7
MM
17968 break;
17969 }
e165f3f0 17970}
a251ffd0 17971
b54cf83a 17972\f
44cd321e
PS
17973
17974/* The following variable value is the last issued insn. */
17975
17976static rtx last_scheduled_insn;
17977
17978/* The following variable helps to balance issuing of load and
17979 store instructions */
17980
17981static int load_store_pendulum;
17982
b54cf83a
DE
17983/* Power4 load update and store update instructions are cracked into a
17984 load or store and an integer insn which are executed in the same cycle.
17985 Branches have their own dispatch slot which does not count against the
17986 GCC issue rate, but it changes the program flow so there are no other
17987 instructions to issue in this cycle. */
17988
17989static int
f676971a
EC
17990rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
17991 int verbose ATTRIBUTE_UNUSED,
a2369ed3 17992 rtx insn, int more)
b54cf83a 17993{
44cd321e 17994 last_scheduled_insn = insn;
b54cf83a
DE
17995 if (GET_CODE (PATTERN (insn)) == USE
17996 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
17997 {
17998 cached_can_issue_more = more;
17999 return cached_can_issue_more;
18000 }
18001
18002 if (insn_terminates_group_p (insn, current_group))
18003 {
18004 cached_can_issue_more = 0;
18005 return cached_can_issue_more;
18006 }
b54cf83a 18007
d296e02e
AP
18008 /* If no reservation, but reach here */
18009 if (recog_memoized (insn) < 0)
18010 return more;
18011
ec507f2d 18012 if (rs6000_sched_groups)
b54cf83a 18013 {
cbe26ab8 18014 if (is_microcoded_insn (insn))
44cd321e 18015 cached_can_issue_more = 0;
cbe26ab8 18016 else if (is_cracked_insn (insn))
44cd321e
PS
18017 cached_can_issue_more = more > 2 ? more - 2 : 0;
18018 else
18019 cached_can_issue_more = more - 1;
18020
18021 return cached_can_issue_more;
b54cf83a 18022 }
165b263e 18023
d296e02e
AP
18024 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
18025 return 0;
18026
44cd321e
PS
18027 cached_can_issue_more = more - 1;
18028 return cached_can_issue_more;
b54cf83a
DE
18029}
18030
a251ffd0
TG
18031/* Adjust the cost of a scheduling dependency. Return the new cost of
18032 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
18033
c237e94a 18034static int
0a4f0294 18035rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 18036{
44cd321e 18037 enum attr_type attr_type;
a251ffd0 18038
44cd321e 18039 if (! recog_memoized (insn))
a251ffd0
TG
18040 return 0;
18041
44cd321e 18042 switch (REG_NOTE_KIND (link))
a251ffd0 18043 {
44cd321e
PS
18044 case REG_DEP_TRUE:
18045 {
18046 /* Data dependency; DEP_INSN writes a register that INSN reads
18047 some cycles later. */
18048
18049 /* Separate a load from a narrower, dependent store. */
18050 if (rs6000_sched_groups
18051 && GET_CODE (PATTERN (insn)) == SET
18052 && GET_CODE (PATTERN (dep_insn)) == SET
18053 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
18054 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
18055 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
18056 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
18057 return cost + 14;
18058
18059 attr_type = get_attr_type (insn);
18060
18061 switch (attr_type)
18062 {
18063 case TYPE_JMPREG:
18064 /* Tell the first scheduling pass about the latency between
18065 a mtctr and bctr (and mtlr and br/blr). The first
18066 scheduling pass will not know about this latency since
18067 the mtctr instruction, which has the latency associated
 18068	     with it, will be generated by reload. */
18069 return TARGET_POWER ? 5 : 4;
18070 case TYPE_BRANCH:
18071 /* Leave some extra cycles between a compare and its
18072 dependent branch, to inhibit expensive mispredicts. */
18073 if ((rs6000_cpu_attr == CPU_PPC603
18074 || rs6000_cpu_attr == CPU_PPC604
18075 || rs6000_cpu_attr == CPU_PPC604E
18076 || rs6000_cpu_attr == CPU_PPC620
18077 || rs6000_cpu_attr == CPU_PPC630
18078 || rs6000_cpu_attr == CPU_PPC750
18079 || rs6000_cpu_attr == CPU_PPC7400
18080 || rs6000_cpu_attr == CPU_PPC7450
18081 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
18082 || rs6000_cpu_attr == CPU_POWER5
18083 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
18084 && recog_memoized (dep_insn)
18085 && (INSN_CODE (dep_insn) >= 0))
982afe02 18086
44cd321e
PS
18087 switch (get_attr_type (dep_insn))
18088 {
18089 case TYPE_CMP:
18090 case TYPE_COMPARE:
18091 case TYPE_DELAYED_COMPARE:
18092 case TYPE_IMUL_COMPARE:
18093 case TYPE_LMUL_COMPARE:
18094 case TYPE_FPCOMPARE:
18095 case TYPE_CR_LOGICAL:
18096 case TYPE_DELAYED_CR:
18097 return cost + 2;
18098 default:
18099 break;
18100 }
18101 break;
18102
18103 case TYPE_STORE:
18104 case TYPE_STORE_U:
18105 case TYPE_STORE_UX:
18106 case TYPE_FPSTORE:
18107 case TYPE_FPSTORE_U:
18108 case TYPE_FPSTORE_UX:
18109 if ((rs6000_cpu == PROCESSOR_POWER6)
18110 && recog_memoized (dep_insn)
18111 && (INSN_CODE (dep_insn) >= 0))
18112 {
18113
18114 if (GET_CODE (PATTERN (insn)) != SET)
18115 /* If this happens, we have to extend this to schedule
18116 optimally. Return default for now. */
18117 return cost;
18118
18119 /* Adjust the cost for the case where the value written
18120 by a fixed point operation is used as the address
18121 gen value on a store. */
18122 switch (get_attr_type (dep_insn))
18123 {
18124 case TYPE_LOAD:
18125 case TYPE_LOAD_U:
18126 case TYPE_LOAD_UX:
18127 case TYPE_CNTLZ:
18128 {
18129 if (! store_data_bypass_p (dep_insn, insn))
18130 return 4;
18131 break;
18132 }
18133 case TYPE_LOAD_EXT:
18134 case TYPE_LOAD_EXT_U:
18135 case TYPE_LOAD_EXT_UX:
18136 case TYPE_VAR_SHIFT_ROTATE:
18137 case TYPE_VAR_DELAYED_COMPARE:
18138 {
18139 if (! store_data_bypass_p (dep_insn, insn))
18140 return 6;
18141 break;
18142 }
18143 case TYPE_INTEGER:
18144 case TYPE_COMPARE:
18145 case TYPE_FAST_COMPARE:
18146 case TYPE_EXTS:
18147 case TYPE_SHIFT:
18148 case TYPE_INSERT_WORD:
18149 case TYPE_INSERT_DWORD:
18150 case TYPE_FPLOAD_U:
18151 case TYPE_FPLOAD_UX:
18152 case TYPE_STORE_U:
18153 case TYPE_STORE_UX:
18154 case TYPE_FPSTORE_U:
18155 case TYPE_FPSTORE_UX:
18156 {
18157 if (! store_data_bypass_p (dep_insn, insn))
18158 return 3;
18159 break;
18160 }
18161 case TYPE_IMUL:
18162 case TYPE_IMUL2:
18163 case TYPE_IMUL3:
18164 case TYPE_LMUL:
18165 case TYPE_IMUL_COMPARE:
18166 case TYPE_LMUL_COMPARE:
18167 {
18168 if (! store_data_bypass_p (dep_insn, insn))
18169 return 17;
18170 break;
18171 }
18172 case TYPE_IDIV:
18173 {
18174 if (! store_data_bypass_p (dep_insn, insn))
18175 return 45;
18176 break;
18177 }
18178 case TYPE_LDIV:
18179 {
18180 if (! store_data_bypass_p (dep_insn, insn))
18181 return 57;
18182 break;
18183 }
18184 default:
18185 break;
18186 }
18187 }
18188 break;
18189
18190 case TYPE_LOAD:
18191 case TYPE_LOAD_U:
18192 case TYPE_LOAD_UX:
18193 case TYPE_LOAD_EXT:
18194 case TYPE_LOAD_EXT_U:
18195 case TYPE_LOAD_EXT_UX:
18196 if ((rs6000_cpu == PROCESSOR_POWER6)
18197 && recog_memoized (dep_insn)
18198 && (INSN_CODE (dep_insn) >= 0))
18199 {
18200
18201 /* Adjust the cost for the case where the value written
18202 by a fixed point instruction is used within the address
 18203                 gen portion of a subsequent load(u)(x). */
18204 switch (get_attr_type (dep_insn))
18205 {
18206 case TYPE_LOAD:
18207 case TYPE_LOAD_U:
18208 case TYPE_LOAD_UX:
18209 case TYPE_CNTLZ:
18210 {
18211 if (set_to_load_agen (dep_insn, insn))
18212 return 4;
18213 break;
18214 }
18215 case TYPE_LOAD_EXT:
18216 case TYPE_LOAD_EXT_U:
18217 case TYPE_LOAD_EXT_UX:
18218 case TYPE_VAR_SHIFT_ROTATE:
18219 case TYPE_VAR_DELAYED_COMPARE:
18220 {
18221 if (set_to_load_agen (dep_insn, insn))
18222 return 6;
18223 break;
18224 }
18225 case TYPE_INTEGER:
18226 case TYPE_COMPARE:
18227 case TYPE_FAST_COMPARE:
18228 case TYPE_EXTS:
18229 case TYPE_SHIFT:
18230 case TYPE_INSERT_WORD:
18231 case TYPE_INSERT_DWORD:
18232 case TYPE_FPLOAD_U:
18233 case TYPE_FPLOAD_UX:
18234 case TYPE_STORE_U:
18235 case TYPE_STORE_UX:
18236 case TYPE_FPSTORE_U:
18237 case TYPE_FPSTORE_UX:
18238 {
18239 if (set_to_load_agen (dep_insn, insn))
18240 return 3;
18241 break;
18242 }
18243 case TYPE_IMUL:
18244 case TYPE_IMUL2:
18245 case TYPE_IMUL3:
18246 case TYPE_LMUL:
18247 case TYPE_IMUL_COMPARE:
18248 case TYPE_LMUL_COMPARE:
18249 {
18250 if (set_to_load_agen (dep_insn, insn))
18251 return 17;
18252 break;
18253 }
18254 case TYPE_IDIV:
18255 {
18256 if (set_to_load_agen (dep_insn, insn))
18257 return 45;
18258 break;
18259 }
18260 case TYPE_LDIV:
18261 {
18262 if (set_to_load_agen (dep_insn, insn))
18263 return 57;
18264 break;
18265 }
18266 default:
18267 break;
18268 }
18269 }
18270 break;
18271
18272 case TYPE_FPLOAD:
18273 if ((rs6000_cpu == PROCESSOR_POWER6)
18274 && recog_memoized (dep_insn)
18275 && (INSN_CODE (dep_insn) >= 0)
18276 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
18277 return 2;
18278
18279 default:
18280 break;
18281 }
c9dbf840 18282
a251ffd0 18283 /* Fall out to return default cost. */
44cd321e
PS
18284 }
18285 break;
18286
18287 case REG_DEP_OUTPUT:
18288 /* Output dependency; DEP_INSN writes a register that INSN writes some
18289 cycles later. */
18290 if ((rs6000_cpu == PROCESSOR_POWER6)
18291 && recog_memoized (dep_insn)
18292 && (INSN_CODE (dep_insn) >= 0))
18293 {
18294 attr_type = get_attr_type (insn);
18295
18296 switch (attr_type)
18297 {
18298 case TYPE_FP:
18299 if (get_attr_type (dep_insn) == TYPE_FP)
18300 return 1;
18301 break;
18302 case TYPE_FPLOAD:
18303 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
18304 return 2;
18305 break;
18306 default:
18307 break;
18308 }
18309 }
18310 case REG_DEP_ANTI:
18311 /* Anti dependency; DEP_INSN reads a register that INSN writes some
18312 cycles later. */
18313 return 0;
18314
18315 default:
18316 gcc_unreachable ();
a251ffd0
TG
18317 }
18318
18319 return cost;
18320}
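/* Illustrative example, not part of the original source: on Power6,
   if DEP_INSN is a load (TYPE_LOAD) whose result is used for the
   address generation of a dependent store INSN and no store-data
   bypass applies, rs6000_adjust_cost above raises the cost to 4; an
   integer multiply feeding the same address is raised to 17, and a
   fixed-point divide to 45 (idiv) or 57 (ldiv).  */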
b6c9286a 18321
cbe26ab8 18322/* The function returns true if INSN is microcoded.
839a4992 18323 Return false otherwise. */
cbe26ab8
DN
18324
18325static bool
18326is_microcoded_insn (rtx insn)
18327{
18328 if (!insn || !INSN_P (insn)
18329 || GET_CODE (PATTERN (insn)) == USE
18330 || GET_CODE (PATTERN (insn)) == CLOBBER)
18331 return false;
18332
d296e02e
AP
18333 if (rs6000_cpu_attr == CPU_CELL)
18334 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
18335
ec507f2d 18336 if (rs6000_sched_groups)
cbe26ab8
DN
18337 {
18338 enum attr_type type = get_attr_type (insn);
18339 if (type == TYPE_LOAD_EXT_U
18340 || type == TYPE_LOAD_EXT_UX
18341 || type == TYPE_LOAD_UX
18342 || type == TYPE_STORE_UX
18343 || type == TYPE_MFCR)
c4ad648e 18344 return true;
cbe26ab8
DN
18345 }
18346
18347 return false;
18348}
18349
cbe26ab8
DN
18350/* The function returns true if INSN is cracked into 2 instructions
18351 by the processor (and therefore occupies 2 issue slots). */
18352
18353static bool
18354is_cracked_insn (rtx insn)
18355{
18356 if (!insn || !INSN_P (insn)
18357 || GET_CODE (PATTERN (insn)) == USE
18358 || GET_CODE (PATTERN (insn)) == CLOBBER)
18359 return false;
18360
ec507f2d 18361 if (rs6000_sched_groups)
cbe26ab8
DN
18362 {
18363 enum attr_type type = get_attr_type (insn);
18364 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
18365 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
18366 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
18367 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
18368 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
18369 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
18370 || type == TYPE_IDIV || type == TYPE_LDIV
18371 || type == TYPE_INSERT_WORD)
18372 return true;
cbe26ab8
DN
18373 }
18374
18375 return false;
18376}
18377
18378/* The function returns true if INSN can be issued only from
a3c9585f 18379 the branch slot. */
cbe26ab8
DN
18380
18381static bool
18382is_branch_slot_insn (rtx insn)
18383{
18384 if (!insn || !INSN_P (insn)
18385 || GET_CODE (PATTERN (insn)) == USE
18386 || GET_CODE (PATTERN (insn)) == CLOBBER)
18387 return false;
18388
ec507f2d 18389 if (rs6000_sched_groups)
cbe26ab8
DN
18390 {
18391 enum attr_type type = get_attr_type (insn);
18392 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 18393 return true;
cbe26ab8
DN
18394 return false;
18395 }
18396
18397 return false;
18398}
79ae11c4 18399
44cd321e
PS
18400/* The function returns true if out_inst sets a value that is
18401 used in the address generation computation of in_insn */
18402static bool
18403set_to_load_agen (rtx out_insn, rtx in_insn)
18404{
18405 rtx out_set, in_set;
18406
18407 /* For performance reasons, only handle the simple case where
 18408     both insns are a single_set. */
18409 out_set = single_set (out_insn);
18410 if (out_set)
18411 {
18412 in_set = single_set (in_insn);
18413 if (in_set)
18414 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
18415 }
18416
18417 return false;
18418}
18419
 18420/* Return true if the target storage location of store INSN1 is
 18421   adjacent to the target storage location of store INSN2.
 18422   Return false otherwise.  */
18423
18424static bool
18425adjacent_mem_locations (rtx insn1, rtx insn2)
18426{
18427
e3a0e200
PB
18428 rtx a = get_store_dest (PATTERN (insn1));
18429 rtx b = get_store_dest (PATTERN (insn2));
18430
44cd321e
PS
18431 if ((GET_CODE (XEXP (a, 0)) == REG
18432 || (GET_CODE (XEXP (a, 0)) == PLUS
18433 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
18434 && (GET_CODE (XEXP (b, 0)) == REG
18435 || (GET_CODE (XEXP (b, 0)) == PLUS
18436 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
18437 {
f98e8938 18438 HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
44cd321e 18439 rtx reg0, reg1;
44cd321e
PS
18440
18441 if (GET_CODE (XEXP (a, 0)) == PLUS)
18442 {
18443 reg0 = XEXP (XEXP (a, 0), 0);
18444 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
18445 }
18446 else
18447 reg0 = XEXP (a, 0);
18448
18449 if (GET_CODE (XEXP (b, 0)) == PLUS)
18450 {
18451 reg1 = XEXP (XEXP (b, 0), 0);
18452 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
18453 }
18454 else
18455 reg1 = XEXP (b, 0);
18456
18457 val_diff = val1 - val0;
18458
18459 return ((REGNO (reg0) == REGNO (reg1))
f98e8938
JJ
18460 && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
18461 || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
44cd321e
PS
18462 }
18463
18464 return false;
18465}
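/* Illustrative example, not part of the original source: for the two
   doubleword stores
       std r4,0(r9)
       std r5,8(r9)
   both addresses use the same base register and the offsets differ by
   exactly the MEM_SIZE of the first store (8 bytes), so
   adjacent_mem_locations above returns true for them.  */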
18466
a4f6c312 18467/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
18468 priority INSN_PRIORITY (INSN). Increase the priority to execute the
18469 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
18470 define this macro if you do not need to adjust the scheduling
18471 priorities of insns. */
bef84347 18472
c237e94a 18473static int
a2369ed3 18474rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 18475{
a4f6c312
SS
18476 /* On machines (like the 750) which have asymmetric integer units,
 18477     where one integer unit can do multiplies and divides and the other
18478 can't, reduce the priority of multiply/divide so it is scheduled
18479 before other integer operations. */
bef84347
VM
18480
18481#if 0
2c3c49de 18482 if (! INSN_P (insn))
bef84347
VM
18483 return priority;
18484
18485 if (GET_CODE (PATTERN (insn)) == USE)
18486 return priority;
18487
18488 switch (rs6000_cpu_attr) {
18489 case CPU_PPC750:
18490 switch (get_attr_type (insn))
18491 {
18492 default:
18493 break;
18494
18495 case TYPE_IMUL:
18496 case TYPE_IDIV:
3cb999d8
DE
18497 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
18498 priority, priority);
bef84347
VM
18499 if (priority >= 0 && priority < 0x01000000)
18500 priority >>= 3;
18501 break;
18502 }
18503 }
18504#endif
18505
44cd321e 18506 if (insn_must_be_first_in_group (insn)
79ae11c4 18507 && reload_completed
f676971a 18508 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
18509 && rs6000_sched_restricted_insns_priority)
18510 {
18511
c4ad648e
AM
18512 /* Prioritize insns that can be dispatched only in the first
18513 dispatch slot. */
79ae11c4 18514 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
18515 /* Attach highest priority to insn. This means that in
18516 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 18517 precede 'priority' (critical path) considerations. */
f676971a 18518 return current_sched_info->sched_max_insns_priority;
79ae11c4 18519 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 18520 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
18521 haifa-sched.c:ready_sort(), only 'priority' (critical path)
18522 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
18523 return (priority + 1);
18524 }
79ae11c4 18525
44cd321e
PS
18526 if (rs6000_cpu == PROCESSOR_POWER6
18527 && ((load_store_pendulum == -2 && is_load_insn (insn))
18528 || (load_store_pendulum == 2 && is_store_insn (insn))))
18529 /* Attach highest priority to insn if the scheduler has just issued two
18530 stores and this instruction is a load, or two loads and this instruction
18531 is a store. Power6 wants loads and stores scheduled alternately
18532 when possible */
18533 return current_sched_info->sched_max_insns_priority;
18534
bef84347
VM
18535 return priority;
18536}
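/* Illustrative note, not part of the original source: when
   rs6000_sched_restricted_insns_priority is 1, an insn that must begin
   a dispatch group is given sched_max_insns_priority, so dispatch-slot
   restrictions dominate the critical-path priority; when it is 2, the
   priority is only bumped by one and the critical path still wins.  */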
18537
d296e02e
AP
18538/* Return true if the instruction is nonpipelined on the Cell. */
18539static bool
18540is_nonpipeline_insn (rtx insn)
18541{
18542 enum attr_type type;
18543 if (!insn || !INSN_P (insn)
18544 || GET_CODE (PATTERN (insn)) == USE
18545 || GET_CODE (PATTERN (insn)) == CLOBBER)
18546 return false;
18547
18548 type = get_attr_type (insn);
18549 if (type == TYPE_IMUL
18550 || type == TYPE_IMUL2
18551 || type == TYPE_IMUL3
18552 || type == TYPE_LMUL
18553 || type == TYPE_IDIV
18554 || type == TYPE_LDIV
18555 || type == TYPE_SDIV
18556 || type == TYPE_DDIV
18557 || type == TYPE_SSQRT
18558 || type == TYPE_DSQRT
18559 || type == TYPE_MFCR
18560 || type == TYPE_MFCRF
18561 || type == TYPE_MFJMPR)
18562 {
18563 return true;
18564 }
18565 return false;
18566}
18567
18568
a4f6c312
SS
18569/* Return how many instructions the machine can issue per cycle. */
18570
c237e94a 18571static int
863d938c 18572rs6000_issue_rate (void)
b6c9286a 18573{
3317bab1
DE
18574 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
18575 if (!reload_completed)
18576 return 1;
18577
b6c9286a 18578 switch (rs6000_cpu_attr) {
3cb999d8
DE
18579 case CPU_RIOS1: /* ? */
18580 case CPU_RS64A:
18581 case CPU_PPC601: /* ? */
ed947a96 18582 case CPU_PPC7450:
3cb999d8 18583 return 3;
b54cf83a 18584 case CPU_PPC440:
b6c9286a 18585 case CPU_PPC603:
bef84347 18586 case CPU_PPC750:
ed947a96 18587 case CPU_PPC7400:
be12c2b0 18588 case CPU_PPC8540:
d296e02e 18589 case CPU_CELL:
fa41c305
EW
18590 case CPU_PPCE300C2:
18591 case CPU_PPCE300C3:
f676971a 18592 return 2;
3cb999d8 18593 case CPU_RIOS2:
b6c9286a 18594 case CPU_PPC604:
19684119 18595 case CPU_PPC604E:
b6c9286a 18596 case CPU_PPC620:
3cb999d8 18597 case CPU_PPC630:
b6c9286a 18598 return 4;
cbe26ab8 18599 case CPU_POWER4:
ec507f2d 18600 case CPU_POWER5:
44cd321e 18601 case CPU_POWER6:
cbe26ab8 18602 return 5;
b6c9286a
MM
18603 default:
18604 return 1;
18605 }
18606}
18607
be12c2b0
VM
18608/* Return how many instructions to look ahead for better insn
18609 scheduling. */
18610
18611static int
863d938c 18612rs6000_use_sched_lookahead (void)
be12c2b0
VM
18613{
18614 if (rs6000_cpu_attr == CPU_PPC8540)
18615 return 4;
d296e02e
AP
18616 if (rs6000_cpu_attr == CPU_CELL)
18617 return (reload_completed ? 8 : 0);
be12c2b0
VM
18618 return 0;
18619}
18620
d296e02e
AP
 18621/* We are choosing an insn from the ready queue.  Return nonzero if INSN can be chosen. */
18622static int
18623rs6000_use_sched_lookahead_guard (rtx insn)
18624{
18625 if (rs6000_cpu_attr != CPU_CELL)
18626 return 1;
18627
18628 if (insn == NULL_RTX || !INSN_P (insn))
18629 abort ();
982afe02 18630
d296e02e
AP
18631 if (!reload_completed
18632 || is_nonpipeline_insn (insn)
18633 || is_microcoded_insn (insn))
18634 return 0;
18635
18636 return 1;
18637}
18638
569fa502
DN
 18639/* Determine if PAT refers to memory. */
18640
18641static bool
18642is_mem_ref (rtx pat)
18643{
18644 const char * fmt;
18645 int i, j;
18646 bool ret = false;
18647
1de59bbd
DE
18648 /* stack_tie does not produce any real memory traffic. */
18649 if (GET_CODE (pat) == UNSPEC
18650 && XINT (pat, 1) == UNSPEC_TIE)
18651 return false;
18652
569fa502
DN
18653 if (GET_CODE (pat) == MEM)
18654 return true;
18655
18656 /* Recursively process the pattern. */
18657 fmt = GET_RTX_FORMAT (GET_CODE (pat));
18658
18659 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
18660 {
18661 if (fmt[i] == 'e')
18662 ret |= is_mem_ref (XEXP (pat, i));
18663 else if (fmt[i] == 'E')
18664 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
18665 ret |= is_mem_ref (XVECEXP (pat, i, j));
18666 }
18667
18668 return ret;
18669}
18670
18671/* Determine if PAT is a PATTERN of a load insn. */
f676971a 18672
569fa502
DN
18673static bool
18674is_load_insn1 (rtx pat)
18675{
18676 if (!pat || pat == NULL_RTX)
18677 return false;
18678
18679 if (GET_CODE (pat) == SET)
18680 return is_mem_ref (SET_SRC (pat));
18681
18682 if (GET_CODE (pat) == PARALLEL)
18683 {
18684 int i;
18685
18686 for (i = 0; i < XVECLEN (pat, 0); i++)
18687 if (is_load_insn1 (XVECEXP (pat, 0, i)))
18688 return true;
18689 }
18690
18691 return false;
18692}
18693
18694/* Determine if INSN loads from memory. */
18695
18696static bool
18697is_load_insn (rtx insn)
18698{
18699 if (!insn || !INSN_P (insn))
18700 return false;
18701
18702 if (GET_CODE (insn) == CALL_INSN)
18703 return false;
18704
18705 return is_load_insn1 (PATTERN (insn));
18706}
18707
18708/* Determine if PAT is a PATTERN of a store insn. */
18709
18710static bool
18711is_store_insn1 (rtx pat)
18712{
18713 if (!pat || pat == NULL_RTX)
18714 return false;
18715
18716 if (GET_CODE (pat) == SET)
18717 return is_mem_ref (SET_DEST (pat));
18718
18719 if (GET_CODE (pat) == PARALLEL)
18720 {
18721 int i;
18722
18723 for (i = 0; i < XVECLEN (pat, 0); i++)
18724 if (is_store_insn1 (XVECEXP (pat, 0, i)))
18725 return true;
18726 }
18727
18728 return false;
18729}
18730
18731/* Determine if INSN stores to memory. */
18732
18733static bool
18734is_store_insn (rtx insn)
18735{
18736 if (!insn || !INSN_P (insn))
18737 return false;
18738
18739 return is_store_insn1 (PATTERN (insn));
18740}
18741
e3a0e200
PB
18742/* Return the dest of a store insn. */
18743
18744static rtx
18745get_store_dest (rtx pat)
18746{
18747 gcc_assert (is_store_insn1 (pat));
18748
18749 if (GET_CODE (pat) == SET)
18750 return SET_DEST (pat);
18751 else if (GET_CODE (pat) == PARALLEL)
18752 {
18753 int i;
18754
18755 for (i = 0; i < XVECLEN (pat, 0); i++)
18756 {
18757 rtx inner_pat = XVECEXP (pat, 0, i);
18758 if (GET_CODE (inner_pat) == SET
18759 && is_mem_ref (SET_DEST (inner_pat)))
18760 return inner_pat;
18761 }
18762 }
18763 /* We shouldn't get here, because we should have either a simple
18764 store insn or a store with update which are covered above. */
18765 gcc_unreachable();
18766}
18767
569fa502
DN
18768/* Returns whether the dependence between INSN and NEXT is considered
18769 costly by the given target. */
18770
18771static bool
b198261f 18772rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 18773{
b198261f
MK
18774 rtx insn;
18775 rtx next;
18776
aabcd309 18777 /* If the flag is not enabled - no dependence is considered costly;
f676971a 18778 allow all dependent insns in the same group.
569fa502
DN
18779 This is the most aggressive option. */
18780 if (rs6000_sched_costly_dep == no_dep_costly)
18781 return false;
18782
f676971a 18783 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
18784 do not allow dependent instructions in the same group.
18785 This is the most conservative option. */
18786 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 18787 return true;
569fa502 18788
b198261f
MK
18789 insn = DEP_PRO (dep);
18790 next = DEP_CON (dep);
18791
f676971a
EC
18792 if (rs6000_sched_costly_dep == store_to_load_dep_costly
18793 && is_load_insn (next)
569fa502
DN
18794 && is_store_insn (insn))
18795 /* Prevent load after store in the same group. */
18796 return true;
18797
18798 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 18799 && is_load_insn (next)
569fa502 18800 && is_store_insn (insn)
e2f6ff94 18801 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
18802 /* Prevent load after store in the same group if it is a true
18803 dependence. */
569fa502 18804 return true;
f676971a
EC
18805
18806 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
18807 and will not be scheduled in the same group. */
18808 if (rs6000_sched_costly_dep <= max_dep_latency
18809 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
18810 return true;
18811
18812 return false;
18813}
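/* Illustrative example, not part of the original source: with
   rs6000_sched_costly_dep == store_to_load_dep_costly, a load that
   depends on an earlier store is kept out of that store's dispatch
   group; with a numeric setting N, any dependence whose remaining
   latency (cost - distance) is at least N is treated as costly.  */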
18814
f676971a 18815/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
18816 skipping any "non-active" insns - insns that will not actually occupy
18817 an issue slot. Return NULL_RTX if such an insn is not found. */
18818
18819static rtx
18820get_next_active_insn (rtx insn, rtx tail)
18821{
f489aff8 18822 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
18823 return NULL_RTX;
18824
f489aff8 18825 while (1)
cbe26ab8 18826 {
f489aff8
AM
18827 insn = NEXT_INSN (insn);
18828 if (insn == NULL_RTX || insn == tail)
18829 return NULL_RTX;
cbe26ab8 18830
f489aff8
AM
18831 if (CALL_P (insn)
18832 || JUMP_P (insn)
18833 || (NONJUMP_INSN_P (insn)
18834 && GET_CODE (PATTERN (insn)) != USE
18835 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 18836 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
18837 break;
18838 }
18839 return insn;
cbe26ab8
DN
18840}
18841
44cd321e
PS
18842/* We are about to begin issuing insns for this clock cycle. */
18843
18844static int
18845rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
18846 rtx *ready ATTRIBUTE_UNUSED,
18847 int *pn_ready ATTRIBUTE_UNUSED,
18848 int clock_var ATTRIBUTE_UNUSED)
18849{
d296e02e
AP
18850 int n_ready = *pn_ready;
18851
44cd321e
PS
18852 if (sched_verbose)
18853 fprintf (dump, "// rs6000_sched_reorder :\n");
18854
d296e02e
AP
18855 /* Reorder the ready list, if the second to last ready insn
 18856     is a nonpipelined insn.  */
18857 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
18858 {
18859 if (is_nonpipeline_insn (ready[n_ready - 1])
18860 && (recog_memoized (ready[n_ready - 2]) > 0))
18861 /* Simply swap first two insns. */
18862 {
18863 rtx tmp = ready[n_ready - 1];
18864 ready[n_ready - 1] = ready[n_ready - 2];
18865 ready[n_ready - 2] = tmp;
18866 }
18867 }
18868
44cd321e
PS
18869 if (rs6000_cpu == PROCESSOR_POWER6)
18870 load_store_pendulum = 0;
18871
18872 return rs6000_issue_rate ();
18873}
18874
18875/* Like rs6000_sched_reorder, but called after issuing each insn. */
18876
18877static int
18878rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
18879 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
18880{
18881 if (sched_verbose)
18882 fprintf (dump, "// rs6000_sched_reorder2 :\n");
18883
18884 /* For Power6, we need to handle some special cases to try and keep the
18885 store queue from overflowing and triggering expensive flushes.
18886
18887 This code monitors how load and store instructions are being issued
18888 and skews the ready list one way or the other to increase the likelihood
18889 that a desired instruction is issued at the proper time.
18890
18891 A couple of things are done. First, we maintain a "load_store_pendulum"
18892 to track the current state of load/store issue.
18893
18894 - If the pendulum is at zero, then no loads or stores have been
18895 issued in the current cycle so we do nothing.
18896
18897 - If the pendulum is 1, then a single load has been issued in this
18898 cycle and we attempt to locate another load in the ready list to
18899 issue with it.
18900
2f8e468b 18901 - If the pendulum is -2, then two stores have already been
44cd321e
PS
18902 issued in this cycle, so we increase the priority of the first load
 18903     in the ready list to increase its likelihood of being chosen first
18904 in the next cycle.
18905
18906 - If the pendulum is -1, then a single store has been issued in this
18907 cycle and we attempt to locate another store in the ready list to
18908 issue with it, preferring a store to an adjacent memory location to
18909 facilitate store pairing in the store queue.
18910
18911 - If the pendulum is 2, then two loads have already been
18912 issued in this cycle, so we increase the priority of the first store
 18913     in the ready list to increase its likelihood of being chosen first
18914 in the next cycle.
18915
18916 - If the pendulum < -2 or > 2, then do nothing.
18917
18918 Note: This code covers the most common scenarios. There exist non
18919 load/store instructions which make use of the LSU and which
18920 would need to be accounted for to strictly model the behavior
18921 of the machine. Those instructions are currently unaccounted
18922 for to help minimize compile time overhead of this code.
18923 */
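  /* Illustrative trace, not part of the original source: after one
     store issues the pendulum sits at -1 and the loop below looks for
     a second store to pair with it, preferring one adjacent to the
     first; once two stores have issued the pendulum reaches -2 and the
     first load found on the ready list gets a priority bump so that it
     is favored in the next cycle.  */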
18924 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
18925 {
18926 int pos;
18927 int i;
18928 rtx tmp;
18929
18930 if (is_store_insn (last_scheduled_insn))
18931 /* Issuing a store, swing the load_store_pendulum to the left */
18932 load_store_pendulum--;
18933 else if (is_load_insn (last_scheduled_insn))
18934 /* Issuing a load, swing the load_store_pendulum to the right */
18935 load_store_pendulum++;
18936 else
18937 return cached_can_issue_more;
18938
18939 /* If the pendulum is balanced, or there is only one instruction on
18940 the ready list, then all is well, so return. */
18941 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
18942 return cached_can_issue_more;
18943
18944 if (load_store_pendulum == 1)
18945 {
18946 /* A load has been issued in this cycle. Scan the ready list
18947 for another load to issue with it */
18948 pos = *pn_ready-1;
18949
18950 while (pos >= 0)
18951 {
18952 if (is_load_insn (ready[pos]))
18953 {
18954 /* Found a load. Move it to the head of the ready list,
 18955                 and adjust its priority so that it is more likely to
18956 stay there */
18957 tmp = ready[pos];
18958 for (i=pos; i<*pn_ready-1; i++)
18959 ready[i] = ready[i + 1];
18960 ready[*pn_ready-1] = tmp;
 18961                 if (INSN_PRIORITY_KNOWN (tmp))
18962 INSN_PRIORITY (tmp)++;
18963 break;
18964 }
18965 pos--;
18966 }
18967 }
18968 else if (load_store_pendulum == -2)
18969 {
18970 /* Two stores have been issued in this cycle. Increase the
18971 priority of the first load in the ready list to favor it for
18972 issuing in the next cycle. */
18973 pos = *pn_ready-1;
18974
18975 while (pos >= 0)
18976 {
18977 if (is_load_insn (ready[pos])
18978 && INSN_PRIORITY_KNOWN (ready[pos]))
18979 {
18980 INSN_PRIORITY (ready[pos])++;
18981
18982 /* Adjust the pendulum to account for the fact that a load
18983 was found and increased in priority. This is to prevent
18984 increasing the priority of multiple loads */
18985 load_store_pendulum--;
18986
18987 break;
18988 }
18989 pos--;
18990 }
18991 }
18992 else if (load_store_pendulum == -1)
18993 {
18994 /* A store has been issued in this cycle. Scan the ready list for
18995 another store to issue with it, preferring a store to an adjacent
18996 memory location */
18997 int first_store_pos = -1;
18998
18999 pos = *pn_ready-1;
19000
19001 while (pos >= 0)
19002 {
19003 if (is_store_insn (ready[pos]))
19004 {
19005 /* Maintain the index of the first store found on the
19006 list */
19007 if (first_store_pos == -1)
19008 first_store_pos = pos;
19009
19010 if (is_store_insn (last_scheduled_insn)
19011 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
19012 {
19013 /* Found an adjacent store. Move it to the head of the
 19014                     ready list, and adjust its priority so that it is
19015 more likely to stay there */
19016 tmp = ready[pos];
19017 for (i=pos; i<*pn_ready-1; i++)
19018 ready[i] = ready[i + 1];
19019 ready[*pn_ready-1] = tmp;
 19020                     if (INSN_PRIORITY_KNOWN (tmp))
19021 INSN_PRIORITY (tmp)++;
19022 first_store_pos = -1;
19023
19024 break;
 19025                 }
19026 }
19027 pos--;
19028 }
19029
19030 if (first_store_pos >= 0)
19031 {
19032 /* An adjacent store wasn't found, but a non-adjacent store was,
19033 so move the non-adjacent store to the front of the ready
19034 list, and adjust its priority so that it is more likely to
19035 stay there. */
19036 tmp = ready[first_store_pos];
19037 for (i=first_store_pos; i<*pn_ready-1; i++)
19038 ready[i] = ready[i + 1];
19039 ready[*pn_ready-1] = tmp;
 19040             if (INSN_PRIORITY_KNOWN (tmp))
19041 INSN_PRIORITY (tmp)++;
19042 }
19043 }
19044 else if (load_store_pendulum == 2)
19045 {
19046 /* Two loads have been issued in this cycle. Increase the priority
19047 of the first store in the ready list to favor it for issuing in
19048 the next cycle. */
19049 pos = *pn_ready-1;
19050
19051 while (pos >= 0)
19052 {
19053 if (is_store_insn (ready[pos])
19054 && INSN_PRIORITY_KNOWN (ready[pos]))
19055 {
19056 INSN_PRIORITY (ready[pos])++;
19057
19058 /* Adjust the pendulum to account for the fact that a store
19059 was found and increased in priority. This is to prevent
19060 increasing the priority of multiple stores */
19061 load_store_pendulum++;
19062
19063 break;
19064 }
19065 pos--;
19066 }
19067 }
19068 }
19069
19070 return cached_can_issue_more;
19071}
19072
839a4992 19073/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
19074 of group WHICH_GROUP.
19075
19076 If WHICH_GROUP == current_group, this function will return true if INSN
 19077   causes the termination of the current group (i.e., the dispatch group to
19078 which INSN belongs). This means that INSN will be the last insn in the
19079 group it belongs to.
19080
19081 If WHICH_GROUP == previous_group, this function will return true if INSN
 19082   causes the termination of the previous group (i.e., the dispatch group that
19083 precedes the group to which INSN belongs). This means that INSN will be
 19084   the first insn in the group it belongs to.  */
19085
19086static bool
19087insn_terminates_group_p (rtx insn, enum group_termination which_group)
19088{
44cd321e 19089 bool first, last;
cbe26ab8
DN
19090
19091 if (! insn)
19092 return false;
569fa502 19093
44cd321e
PS
19094 first = insn_must_be_first_in_group (insn);
19095 last = insn_must_be_last_in_group (insn);
cbe26ab8 19096
44cd321e 19097 if (first && last)
cbe26ab8
DN
19098 return true;
19099
19100 if (which_group == current_group)
44cd321e 19101 return last;
cbe26ab8 19102 else if (which_group == previous_group)
44cd321e
PS
19103 return first;
19104
19105 return false;
19106}
19107
19108
19109static bool
19110insn_must_be_first_in_group (rtx insn)
19111{
19112 enum attr_type type;
19113
19114 if (!insn
19115 || insn == NULL_RTX
19116 || GET_CODE (insn) == NOTE
19117 || GET_CODE (PATTERN (insn)) == USE
19118 || GET_CODE (PATTERN (insn)) == CLOBBER)
19119 return false;
19120
19121 switch (rs6000_cpu)
cbe26ab8 19122 {
44cd321e
PS
19123 case PROCESSOR_POWER5:
19124 if (is_cracked_insn (insn))
19125 return true;
19126 case PROCESSOR_POWER4:
19127 if (is_microcoded_insn (insn))
19128 return true;
19129
19130 if (!rs6000_sched_groups)
19131 return false;
19132
19133 type = get_attr_type (insn);
19134
19135 switch (type)
19136 {
19137 case TYPE_MFCR:
19138 case TYPE_MFCRF:
19139 case TYPE_MTCR:
19140 case TYPE_DELAYED_CR:
19141 case TYPE_CR_LOGICAL:
19142 case TYPE_MTJMPR:
19143 case TYPE_MFJMPR:
19144 case TYPE_IDIV:
19145 case TYPE_LDIV:
19146 case TYPE_LOAD_L:
19147 case TYPE_STORE_C:
19148 case TYPE_ISYNC:
19149 case TYPE_SYNC:
19150 return true;
19151 default:
19152 break;
19153 }
19154 break;
19155 case PROCESSOR_POWER6:
19156 type = get_attr_type (insn);
19157
19158 switch (type)
19159 {
19160 case TYPE_INSERT_DWORD:
19161 case TYPE_EXTS:
19162 case TYPE_CNTLZ:
19163 case TYPE_SHIFT:
19164 case TYPE_VAR_SHIFT_ROTATE:
19165 case TYPE_TRAP:
19166 case TYPE_IMUL:
19167 case TYPE_IMUL2:
19168 case TYPE_IMUL3:
19169 case TYPE_LMUL:
19170 case TYPE_IDIV:
19171 case TYPE_INSERT_WORD:
19172 case TYPE_DELAYED_COMPARE:
19173 case TYPE_IMUL_COMPARE:
19174 case TYPE_LMUL_COMPARE:
19175 case TYPE_FPCOMPARE:
19176 case TYPE_MFCR:
19177 case TYPE_MTCR:
19178 case TYPE_MFJMPR:
19179 case TYPE_MTJMPR:
19180 case TYPE_ISYNC:
19181 case TYPE_SYNC:
19182 case TYPE_LOAD_L:
19183 case TYPE_STORE_C:
19184 case TYPE_LOAD_U:
19185 case TYPE_LOAD_UX:
19186 case TYPE_LOAD_EXT_UX:
19187 case TYPE_STORE_U:
19188 case TYPE_STORE_UX:
19189 case TYPE_FPLOAD_U:
19190 case TYPE_FPLOAD_UX:
19191 case TYPE_FPSTORE_U:
19192 case TYPE_FPSTORE_UX:
19193 return true;
19194 default:
19195 break;
19196 }
19197 break;
19198 default:
19199 break;
19200 }
19201
19202 return false;
19203}
19204
19205static bool
19206insn_must_be_last_in_group (rtx insn)
19207{
19208 enum attr_type type;
19209
19210 if (!insn
19211 || insn == NULL_RTX
19212 || GET_CODE (insn) == NOTE
19213 || GET_CODE (PATTERN (insn)) == USE
19214 || GET_CODE (PATTERN (insn)) == CLOBBER)
19215 return false;
19216
19217 switch (rs6000_cpu) {
19218 case PROCESSOR_POWER4:
19219 case PROCESSOR_POWER5:
19220 if (is_microcoded_insn (insn))
19221 return true;
19222
19223 if (is_branch_slot_insn (insn))
19224 return true;
19225
19226 break;
19227 case PROCESSOR_POWER6:
19228 type = get_attr_type (insn);
19229
19230 switch (type)
19231 {
19232 case TYPE_EXTS:
19233 case TYPE_CNTLZ:
19234 case TYPE_SHIFT:
19235 case TYPE_VAR_SHIFT_ROTATE:
19236 case TYPE_TRAP:
19237 case TYPE_IMUL:
19238 case TYPE_IMUL2:
19239 case TYPE_IMUL3:
19240 case TYPE_LMUL:
19241 case TYPE_IDIV:
19242 case TYPE_DELAYED_COMPARE:
19243 case TYPE_IMUL_COMPARE:
19244 case TYPE_LMUL_COMPARE:
19245 case TYPE_FPCOMPARE:
19246 case TYPE_MFCR:
19247 case TYPE_MTCR:
19248 case TYPE_MFJMPR:
19249 case TYPE_MTJMPR:
19250 case TYPE_ISYNC:
19251 case TYPE_SYNC:
19252 case TYPE_LOAD_L:
19253 case TYPE_STORE_C:
19254 return true;
19255 default:
19256 break;
cbe26ab8 19257 }
44cd321e
PS
19258 break;
19259 default:
19260 break;
19261 }
cbe26ab8
DN
19262
19263 return false;
19264}
19265
839a4992 19266/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
19267 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
19268
19269static bool
19270is_costly_group (rtx *group_insns, rtx next_insn)
19271{
19272 int i;
cbe26ab8
DN
19273 int issue_rate = rs6000_issue_rate ();
19274
19275 for (i = 0; i < issue_rate; i++)
19276 {
e2f6ff94
MK
19277 sd_iterator_def sd_it;
19278 dep_t dep;
cbe26ab8 19279 rtx insn = group_insns[i];
b198261f 19280
cbe26ab8 19281 if (!insn)
c4ad648e 19282 continue;
b198261f 19283
e2f6ff94 19284 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 19285 {
b198261f
MK
19286 rtx next = DEP_CON (dep);
19287
19288 if (next == next_insn
19289 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
19290 return true;
c4ad648e 19291 }
cbe26ab8
DN
19292 }
19293
19294 return false;
19295}
19296
f676971a 19297/* Utility of the function redefine_groups.
cbe26ab8
DN
19298 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
19299 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
19300 to keep it "far" (in a separate group) from GROUP_INSNS, following
19301 one of the following schemes, depending on the value of the flag
 19302   -minsert-sched-nops = X:
19303 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 19304 in order to force NEXT_INSN into a separate group.
f676971a
EC
19305 (2) X < sched_finish_regroup_exact: insert exactly X nops.
19306 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
19307 insertion (has a group just ended, how many vacant issue slots remain in the
19308 last group, and how many dispatch groups were encountered so far). */
19309
f676971a 19310static int
c4ad648e
AM
19311force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
19312 rtx next_insn, bool *group_end, int can_issue_more,
19313 int *group_count)
cbe26ab8
DN
19314{
19315 rtx nop;
19316 bool force;
19317 int issue_rate = rs6000_issue_rate ();
19318 bool end = *group_end;
19319 int i;
19320
19321 if (next_insn == NULL_RTX)
19322 return can_issue_more;
19323
19324 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
19325 return can_issue_more;
19326
19327 force = is_costly_group (group_insns, next_insn);
19328 if (!force)
19329 return can_issue_more;
19330
19331 if (sched_verbose > 6)
 19332     fprintf (dump, "force: group count = %d, can_issue_more = %d\n",
c4ad648e 19333	     *group_count, can_issue_more);
cbe26ab8
DN
19334
19335 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
19336 {
19337 if (*group_end)
c4ad648e 19338 can_issue_more = 0;
cbe26ab8
DN
19339
19340 /* Since only a branch can be issued in the last issue_slot, it is
19341 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
19342 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
19343 in this case the last nop will start a new group and the branch
19344 will be forced to the new group. */
cbe26ab8 19345 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 19346 can_issue_more--;
cbe26ab8
DN
19347
19348 while (can_issue_more > 0)
c4ad648e 19349 {
9390387d 19350 nop = gen_nop ();
c4ad648e
AM
19351 emit_insn_before (nop, next_insn);
19352 can_issue_more--;
19353 }
cbe26ab8
DN
19354
19355 *group_end = true;
19356 return 0;
f676971a 19357 }
cbe26ab8
DN
19358
19359 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
19360 {
19361 int n_nops = rs6000_sched_insert_nops;
19362
f676971a 19363 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 19364 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 19365 if (can_issue_more == 0)
c4ad648e 19366 can_issue_more = issue_rate;
cbe26ab8
DN
19367 can_issue_more--;
19368 if (can_issue_more == 0)
c4ad648e
AM
19369 {
19370 can_issue_more = issue_rate - 1;
19371 (*group_count)++;
19372 end = true;
19373 for (i = 0; i < issue_rate; i++)
19374 {
19375 group_insns[i] = 0;
19376 }
19377 }
cbe26ab8
DN
19378
19379 while (n_nops > 0)
c4ad648e
AM
19380 {
19381 nop = gen_nop ();
19382 emit_insn_before (nop, next_insn);
19383 if (can_issue_more == issue_rate - 1) /* new group begins */
19384 end = false;
19385 can_issue_more--;
19386 if (can_issue_more == 0)
19387 {
19388 can_issue_more = issue_rate - 1;
19389 (*group_count)++;
19390 end = true;
19391 for (i = 0; i < issue_rate; i++)
19392 {
19393 group_insns[i] = 0;
19394 }
19395 }
19396 n_nops--;
19397 }
cbe26ab8
DN
19398
19399 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 19400 can_issue_more++;
cbe26ab8 19401
c4ad648e
AM
19402 /* Is next_insn going to start a new group? */
19403 *group_end
19404 = (end
cbe26ab8
DN
19405 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19406 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19407 || (can_issue_more < issue_rate &&
c4ad648e 19408 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19409 if (*group_end && end)
c4ad648e 19410 (*group_count)--;
cbe26ab8
DN
19411
19412 if (sched_verbose > 6)
c4ad648e
AM
19413 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
19414 *group_count, can_issue_more);
f676971a
EC
19415 return can_issue_more;
19416 }
cbe26ab8
DN
19417
19418 return can_issue_more;
19419}
19420
19421/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 19422 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
19423 form in practice. It tries to achieve this synchronization by forcing the
19424 estimated processor grouping on the compiler (as opposed to the function
 19425   'pad_groups' which tries to force the scheduler's grouping on the processor).
19426
19427 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
19428 examines the (estimated) dispatch groups that will be formed by the processor
19429 dispatcher. It marks these group boundaries to reflect the estimated
19430 processor grouping, overriding the grouping that the scheduler had marked.
19431 Depending on the value of the flag '-minsert-sched-nops' this function can
19432 force certain insns into separate groups or force a certain distance between
19433 them by inserting nops, for example, if there exists a "costly dependence"
19434 between the insns.
19435
19436 The function estimates the group boundaries that the processor will form as
0fa2e4df 19437 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
19438 each insn. A subsequent insn will start a new group if one of the following
19439 4 cases applies:
19440 - no more vacant issue slots remain in the current dispatch group.
19441 - only the last issue slot, which is the branch slot, is vacant, but the next
19442 insn is not a branch.
19443 - only the last 2 or less issue slots, including the branch slot, are vacant,
19444 which means that a cracked insn (which occupies two issue slots) can't be
19445 issued in this group.
f676971a 19446 - less than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
19447 start a new group. */
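/* Illustrative example, not part of the original source: with an issue
   rate of 5 (Power4/Power5), if four slots of the current group are
   already filled and the next insn is not a branch, only the branch
   slot remains and the next insn starts a new group; similarly a
   cracked insn, which needs two slots, starts a new group whenever two
   or fewer slots (including the branch slot) are left.  */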
19448
19449static int
19450redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19451{
19452 rtx insn, next_insn;
19453 int issue_rate;
19454 int can_issue_more;
19455 int slot, i;
19456 bool group_end;
19457 int group_count = 0;
19458 rtx *group_insns;
19459
19460 /* Initialize. */
19461 issue_rate = rs6000_issue_rate ();
19462 group_insns = alloca (issue_rate * sizeof (rtx));
f676971a 19463 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
19464 {
19465 group_insns[i] = 0;
19466 }
19467 can_issue_more = issue_rate;
19468 slot = 0;
19469 insn = get_next_active_insn (prev_head_insn, tail);
19470 group_end = false;
19471
19472 while (insn != NULL_RTX)
19473 {
19474 slot = (issue_rate - can_issue_more);
19475 group_insns[slot] = insn;
19476 can_issue_more =
c4ad648e 19477 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 19478 if (insn_terminates_group_p (insn, current_group))
c4ad648e 19479 can_issue_more = 0;
cbe26ab8
DN
19480
19481 next_insn = get_next_active_insn (insn, tail);
19482 if (next_insn == NULL_RTX)
c4ad648e 19483 return group_count + 1;
cbe26ab8 19484
c4ad648e
AM
19485 /* Is next_insn going to start a new group? */
19486 group_end
19487 = (can_issue_more == 0
19488 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19489 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19490 || (can_issue_more < issue_rate &&
19491 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19492
f676971a 19493 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
19494 next_insn, &group_end, can_issue_more,
19495 &group_count);
cbe26ab8
DN
19496
19497 if (group_end)
c4ad648e
AM
19498 {
19499 group_count++;
19500 can_issue_more = 0;
19501 for (i = 0; i < issue_rate; i++)
19502 {
19503 group_insns[i] = 0;
19504 }
19505 }
cbe26ab8
DN
19506
19507 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 19508 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 19509 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 19510 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
19511
19512 insn = next_insn;
19513 if (can_issue_more == 0)
c4ad648e
AM
19514 can_issue_more = issue_rate;
19515 } /* while */
cbe26ab8
DN
19516
19517 return group_count;
19518}
19519
19520/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
19521 dispatch group boundaries that the scheduler had marked. Pad with nops
19522 any dispatch groups which have vacant issue slots, in order to force the
19523 scheduler's grouping on the processor dispatcher. The function
19524 returns the number of dispatch groups found. */
19525
19526static int
19527pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19528{
19529 rtx insn, next_insn;
19530 rtx nop;
19531 int issue_rate;
19532 int can_issue_more;
19533 int group_end;
19534 int group_count = 0;
19535
19536 /* Initialize issue_rate. */
19537 issue_rate = rs6000_issue_rate ();
19538 can_issue_more = issue_rate;
19539
19540 insn = get_next_active_insn (prev_head_insn, tail);
19541 next_insn = get_next_active_insn (insn, tail);
19542
19543 while (insn != NULL_RTX)
19544 {
19545 can_issue_more =
19546 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
19547
19548 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
19549
19550 if (next_insn == NULL_RTX)
c4ad648e 19551 break;
cbe26ab8
DN
19552
19553 if (group_end)
c4ad648e
AM
19554 {
19555 /* If the scheduler had marked group termination at this location
 19556	     (between insn and next_insn), and neither insn nor next_insn will
19557 force group termination, pad the group with nops to force group
19558 termination. */
19559 if (can_issue_more
19560 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
19561 && !insn_terminates_group_p (insn, current_group)
19562 && !insn_terminates_group_p (next_insn, previous_group))
19563 {
9390387d 19564 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
19565 can_issue_more--;
19566
19567 while (can_issue_more)
19568 {
19569 nop = gen_nop ();
19570 emit_insn_before (nop, next_insn);
19571 can_issue_more--;
19572 }
19573 }
19574
19575 can_issue_more = issue_rate;
19576 group_count++;
19577 }
cbe26ab8
DN
19578
19579 insn = next_insn;
19580 next_insn = get_next_active_insn (insn, tail);
19581 }
19582
19583 return group_count;
19584}
19585
44cd321e
PS
19586/* We're beginning a new block. Initialize data structures as necessary. */
19587
19588static void
19589rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
19590 int sched_verbose ATTRIBUTE_UNUSED,
19591 int max_ready ATTRIBUTE_UNUSED)
982afe02 19592{
44cd321e
PS
19593 last_scheduled_insn = NULL_RTX;
19594 load_store_pendulum = 0;
19595}
19596
cbe26ab8
DN
19597/* The following function is called at the end of scheduling BB.
 19598   After reload, it inserts nops to enforce insn group bundling.  */
19599
19600static void
38f391a5 19601rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
19602{
19603 int n_groups;
19604
19605 if (sched_verbose)
19606 fprintf (dump, "=== Finishing schedule.\n");
19607
ec507f2d 19608 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
19609 {
19610 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 19611 return;
cbe26ab8
DN
19612
19613 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
19614 n_groups = pad_groups (dump, sched_verbose,
19615 current_sched_info->prev_head,
19616 current_sched_info->next_tail);
cbe26ab8 19617 else
c4ad648e
AM
19618 n_groups = redefine_groups (dump, sched_verbose,
19619 current_sched_info->prev_head,
19620 current_sched_info->next_tail);
cbe26ab8
DN
19621
19622 if (sched_verbose >= 6)
19623 {
19624 fprintf (dump, "ngroups = %d\n", n_groups);
19625 print_rtl (dump, current_sched_info->prev_head);
19626 fprintf (dump, "Done finish_sched\n");
19627 }
19628 }
19629}
b6c9286a 19630\f
b6c9286a
MM
19631/* Length in units of the trampoline for entering a nested function. */
19632
19633int
863d938c 19634rs6000_trampoline_size (void)
b6c9286a
MM
19635{
19636 int ret = 0;
19637
19638 switch (DEFAULT_ABI)
19639 {
19640 default:
37409796 19641 gcc_unreachable ();
b6c9286a
MM
19642
19643 case ABI_AIX:
8f802bfb 19644 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
19645 break;
19646
4dabc42d 19647 case ABI_DARWIN:
b6c9286a 19648 case ABI_V4:
03a7e1a5 19649 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 19650 break;
b6c9286a
MM
19651 }
19652
19653 return ret;
19654}
19655
19656/* Emit RTL insns to initialize the variable parts of a trampoline.
19657 FNADDR is an RTX for the address of the function's pure code.
19658 CXT is an RTX for the static chain value for the function. */
19659
19660void
a2369ed3 19661rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 19662{
8bd04c56 19663 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 19664 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
19665
19666 switch (DEFAULT_ABI)
19667 {
19668 default:
37409796 19669 gcc_unreachable ();
b6c9286a 19670
8bd04c56 19671/* Macros to shorten the code expansions below. */
9613eaff 19672#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 19673#define MEM_PLUS(addr,offset) \
9613eaff 19674 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 19675
b6c9286a
MM
 19676    /* Under AIX, just build the 3-word function descriptor.  */
19677 case ABI_AIX:
8bd04c56 19678 {
9613eaff
SH
19679 rtx fn_reg = gen_reg_rtx (Pmode);
19680 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 19681 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 19682 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
19683 emit_move_insn (MEM_DEREF (addr), fn_reg);
19684 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
19685 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
19686 }
b6c9286a
MM
19687 break;
19688
4dabc42d
TC
19689 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
19690 case ABI_DARWIN:
b6c9286a 19691 case ABI_V4:
9613eaff 19692 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 19693 FALSE, VOIDmode, 4,
9613eaff 19694 addr, Pmode,
eaf1bcf1 19695 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
19696 fnaddr, Pmode,
19697 ctx_reg, Pmode);
b6c9286a 19698 break;
b6c9286a
MM
19699 }
19700
19701 return;
19702}
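/* Illustrative layout, not part of the original source: under ABI_AIX
   the code above fills the trampoline with a three-word function
   descriptor built from FNADDR's own descriptor plus the static chain:
       addr + 0*regsize: entry-point address
       addr + 1*regsize: TOC pointer
       addr + 2*regsize: static chain (CXT)
   which matches the 12 (32-bit) and 24 (64-bit) byte sizes returned by
   rs6000_trampoline_size.  */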
7509c759
MM
19703
19704\f
91d231cb 19705/* Table of valid machine attributes. */
a4f6c312 19706
91d231cb 19707const struct attribute_spec rs6000_attribute_table[] =
7509c759 19708{
91d231cb 19709 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 19710 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
19711 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
19712 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
19713 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
19714 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
19715#ifdef SUBTARGET_ATTRIBUTE_TABLE
19716 SUBTARGET_ATTRIBUTE_TABLE,
19717#endif
a5c76ee6 19718 { NULL, 0, 0, false, false, false, NULL }
91d231cb 19719};
7509c759 19720
8bb418a3
ZL
19721/* Handle the "altivec" attribute. The attribute may have
19722 arguments as follows:
f676971a 19723
8bb418a3
ZL
19724 __attribute__((altivec(vector__)))
19725 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
19726 __attribute__((altivec(bool__))) (always followed by 'unsigned')
19727
19728 and may appear more than once (e.g., 'vector bool char') in a
19729 given declaration. */
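/* Illustrative example, not part of the original source: the AltiVec
   'vector' keyword is expanded to this attribute internally, so a
   declaration such as
       __attribute__((altivec(vector__))) unsigned int vui;
   reaches the handler below with altivec_type 'v' and SImode, and the
   declared type is rewritten to unsigned_V4SI_type_node.  */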
19730
19731static tree
f90ac3f0
UP
19732rs6000_handle_altivec_attribute (tree *node,
19733 tree name ATTRIBUTE_UNUSED,
19734 tree args,
8bb418a3
ZL
19735 int flags ATTRIBUTE_UNUSED,
19736 bool *no_add_attrs)
19737{
19738 tree type = *node, result = NULL_TREE;
19739 enum machine_mode mode;
19740 int unsigned_p;
19741 char altivec_type
19742 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
19743 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
19744 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 19745 : '?');
8bb418a3
ZL
19746
19747 while (POINTER_TYPE_P (type)
19748 || TREE_CODE (type) == FUNCTION_TYPE
19749 || TREE_CODE (type) == METHOD_TYPE
19750 || TREE_CODE (type) == ARRAY_TYPE)
19751 type = TREE_TYPE (type);
19752
19753 mode = TYPE_MODE (type);
19754
f90ac3f0
UP
19755 /* Check for invalid AltiVec type qualifiers. */
19756 if (type == long_unsigned_type_node || type == long_integer_type_node)
19757 {
19758 if (TARGET_64BIT)
19759 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
19760 else if (rs6000_warn_altivec_long)
d4ee4d25 19761 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
19762 }
19763 else if (type == long_long_unsigned_type_node
19764 || type == long_long_integer_type_node)
19765 error ("use of %<long long%> in AltiVec types is invalid");
19766 else if (type == double_type_node)
19767 error ("use of %<double%> in AltiVec types is invalid");
19768 else if (type == long_double_type_node)
19769 error ("use of %<long double%> in AltiVec types is invalid");
19770 else if (type == boolean_type_node)
19771 error ("use of boolean types in AltiVec types is invalid");
19772 else if (TREE_CODE (type) == COMPLEX_TYPE)
19773 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
19774 else if (DECIMAL_FLOAT_MODE_P (mode))
19775 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
19776
19777 switch (altivec_type)
19778 {
19779 case 'v':
8df83eae 19780 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
19781 switch (mode)
19782 {
c4ad648e
AM
19783 case SImode:
19784 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
19785 break;
19786 case HImode:
19787 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
19788 break;
19789 case QImode:
19790 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
19791 break;
19792 case SFmode: result = V4SF_type_node; break;
19793 /* If the user says 'vector int bool', we may be handed the 'bool'
19794 attribute _before_ the 'vector' attribute, and so select the
19795 proper type in the 'b' case below. */
19796 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
19797 result = type;
19798 default: break;
8bb418a3
ZL
19799 }
19800 break;
19801 case 'b':
19802 switch (mode)
19803 {
c4ad648e
AM
19804 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
19805 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
19806 case QImode: case V16QImode: result = bool_V16QI_type_node;
19807 default: break;
8bb418a3
ZL
19808 }
19809 break;
19810 case 'p':
19811 switch (mode)
19812 {
c4ad648e
AM
19813 case V8HImode: result = pixel_V8HI_type_node;
19814 default: break;
8bb418a3
ZL
19815 }
19816 default: break;
19817 }
19818
7958a2a6
FJ
19819 if (result && result != type && TYPE_READONLY (type))
19820 result = build_qualified_type (result, TYPE_QUAL_CONST);
19821
8bb418a3
ZL
19822 *no_add_attrs = true; /* No need to hang on to the attribute. */
19823
f90ac3f0 19824 if (result)
5dc11954 19825 *node = lang_hooks.types.reconstruct_complex_type (*node, result);
8bb418a3
ZL
19826
19827 return NULL_TREE;
19828}
19829
f18eca82
ZL
19830/* AltiVec defines four built-in scalar types that serve as vector
19831 elements; we must teach the compiler how to mangle them. */
19832
19833static const char *
3101faab 19834rs6000_mangle_type (const_tree type)
f18eca82 19835{
608063c3
JB
19836 type = TYPE_MAIN_VARIANT (type);
19837
19838 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
19839 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
19840 return NULL;
19841
f18eca82
ZL
19842 if (type == bool_char_type_node) return "U6__boolc";
19843 if (type == bool_short_type_node) return "U6__bools";
19844 if (type == pixel_type_node) return "u7__pixel";
19845 if (type == bool_int_type_node) return "U6__booli";
19846
337bde91
DE
19847 /* Mangle IBM extended float long double as `g' (__float128) on
19848 powerpc*-linux where long-double-64 previously was the default. */
19849 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
19850 && TARGET_ELF
19851 && TARGET_LONG_DOUBLE_128
19852 && !TARGET_IEEEQUAD)
19853 return "g";
19854
f18eca82
ZL
19855 /* For all other types, use normal C++ mangling. */
19856 return NULL;
19857}
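/* Illustrative example, not part of the original source: the strings
   returned above are spliced into C++ mangled names, e.g. the __pixel
   element type contributes "u7__pixel" and __bool int contributes
   "U6__booli"; long double mangles as "g" only when it is the 128-bit
   IBM extended format on ELF targets.  */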
19858
a5c76ee6
ZW
19859/* Handle a "longcall" or "shortcall" attribute; arguments as in
19860 struct attribute_spec.handler. */
a4f6c312 19861
91d231cb 19862static tree
f676971a
EC
19863rs6000_handle_longcall_attribute (tree *node, tree name,
19864 tree args ATTRIBUTE_UNUSED,
19865 int flags ATTRIBUTE_UNUSED,
a2369ed3 19866 bool *no_add_attrs)
91d231cb
JM
19867{
19868 if (TREE_CODE (*node) != FUNCTION_TYPE
19869 && TREE_CODE (*node) != FIELD_DECL
19870 && TREE_CODE (*node) != TYPE_DECL)
19871 {
5c498b10 19872 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
19873 IDENTIFIER_POINTER (name));
19874 *no_add_attrs = true;
19875 }
6a4cee5f 19876
91d231cb 19877 return NULL_TREE;
7509c759
MM
19878}
19879
a5c76ee6
ZW
19880/* Set longcall attributes on all functions declared when
19881 rs6000_default_long_calls is true. */
19882static void
a2369ed3 19883rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
19884{
19885 if (rs6000_default_long_calls
19886 && (TREE_CODE (type) == FUNCTION_TYPE
19887 || TREE_CODE (type) == METHOD_TYPE))
19888 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
19889 NULL_TREE,
19890 TYPE_ATTRIBUTES (type));
16d6f994
EC
19891
19892#if TARGET_MACHO
19893 darwin_set_default_type_attributes (type);
19894#endif
a5c76ee6
ZW
19895}
19896
3cb999d8
DE
19897/* Return a reference suitable for calling a function with the
19898 longcall attribute. */
a4f6c312 19899
9390387d 19900rtx
a2369ed3 19901rs6000_longcall_ref (rtx call_ref)
6a4cee5f 19902{
d330fd93 19903 const char *call_name;
6a4cee5f
MM
19904 tree node;
19905
19906 if (GET_CODE (call_ref) != SYMBOL_REF)
19907 return call_ref;
19908
 19909 /* System V adds '.' to the internal name, so skip any leading dots. */

19910 call_name = XSTR (call_ref, 0);
19911 if (*call_name == '.')
19912 {
19913 while (*call_name == '.')
19914 call_name++;
19915
19916 node = get_identifier (call_name);
39403d82 19917 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
19918 }
19919
19920 return force_reg (Pmode, call_ref);
19921}
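/* Hedged usage sketch, not part of this file: declaring

     extern void far_away (void) __attribute__ ((longcall));

   (or compiling with -mlongcall, see rs6000_set_default_type_attributes
   above) routes calls to far_away through rs6000_longcall_ref, i.e. the
   address is forced into a register and the call is made indirectly,
   instead of a direct `bl' whose reach is limited to +/- 32MB.  */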
7509c759 19922\f
77ccdfed
EC
19923#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
19924#define TARGET_USE_MS_BITFIELD_LAYOUT 0
19925#endif
19926
19927/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
19928 struct attribute_spec.handler. */
19929static tree
19930rs6000_handle_struct_attribute (tree *node, tree name,
19931 tree args ATTRIBUTE_UNUSED,
19932 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
19933{
19934 tree *type = NULL;
19935 if (DECL_P (*node))
19936 {
19937 if (TREE_CODE (*node) == TYPE_DECL)
19938 type = &TREE_TYPE (*node);
19939 }
19940 else
19941 type = node;
19942
19943 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
19944 || TREE_CODE (*type) == UNION_TYPE)))
19945 {
19946 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
19947 *no_add_attrs = true;
19948 }
19949
19950 else if ((is_attribute_p ("ms_struct", name)
19951 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
19952 || ((is_attribute_p ("gcc_struct", name)
19953 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
19954 {
19955 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
19956 IDENTIFIER_POINTER (name));
19957 *no_add_attrs = true;
19958 }
19959
19960 return NULL_TREE;
19961}
19962
19963static bool
3101faab 19964rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
19965{
19966 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
19967 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
19968 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
19969}
19970\f
b64a1b53
RH
19971#ifdef USING_ELFOS_H
19972
d6b5193b 19973/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 19974
d6b5193b
RS
19975static void
19976rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
19977{
19978 if (DEFAULT_ABI == ABI_AIX
19979 && TARGET_MINIMAL_TOC
19980 && !TARGET_RELOCATABLE)
19981 {
19982 if (!toc_initialized)
19983 {
19984 toc_initialized = 1;
19985 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19986 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
19987 fprintf (asm_out_file, "\t.tc ");
19988 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
19989 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19990 fprintf (asm_out_file, "\n");
19991
19992 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19993 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19994 fprintf (asm_out_file, " = .+32768\n");
19995 }
19996 else
19997 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19998 }
19999 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
20000 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20001 else
20002 {
20003 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20004 if (!toc_initialized)
20005 {
20006 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20007 fprintf (asm_out_file, " = .+32768\n");
20008 toc_initialized = 1;
20009 }
20010 }
20011}
20012
20013/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 20014
b64a1b53 20015static void
d6b5193b
RS
20016rs6000_elf_asm_init_sections (void)
20017{
20018 toc_section
20019 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
20020
20021 sdata2_section
20022 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
20023 SDATA2_SECTION_ASM_OP);
20024}
20025
20026/* Implement TARGET_SELECT_RTX_SECTION. */
20027
20028static section *
f676971a 20029rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 20030 unsigned HOST_WIDE_INT align)
7509c759 20031{
a9098fd0 20032 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20033 return toc_section;
7509c759 20034 else
d6b5193b 20035 return default_elf_select_rtx_section (mode, x, align);
7509c759 20036}
d9407988 20037\f
d1908feb
JJ
20038/* For a SYMBOL_REF, set generic flags and then perform some
20039 target-specific processing.
20040
d1908feb
JJ
20041 When the AIX ABI is requested on a non-AIX system, replace the
20042 function name with the real name (with a leading .) rather than the
20043 function descriptor name. This saves a lot of overriding code to
20044 read the prefixes. */
d9407988 20045
fb49053f 20046static void
a2369ed3 20047rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 20048{
d1908feb 20049 default_encode_section_info (decl, rtl, first);
b2003250 20050
d1908feb
JJ
20051 if (first
20052 && TREE_CODE (decl) == FUNCTION_DECL
20053 && !TARGET_AIX
20054 && DEFAULT_ABI == ABI_AIX)
d9407988 20055 {
c6a2438a 20056 rtx sym_ref = XEXP (rtl, 0);
d1908feb
JJ
20057 size_t len = strlen (XSTR (sym_ref, 0));
20058 char *str = alloca (len + 2);
20059 str[0] = '.';
20060 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
20061 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 20062 }
d9407988
MM
20063}
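/* Worked example of the renaming above: on powerpc64 ELF (ABI_AIX
   without TARGET_AIX), a FUNCTION_DECL named "foo" gets the assembler
   symbol ".foo" -- the dot name is the code entry point, while plain
   "foo" names the function descriptor.  */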
20064
21d9bb3f
PB
20065static inline bool
20066compare_section_name (const char *section, const char *template)
20067{
20068 int len;
20069
20070 len = strlen (template);
20071 return (strncmp (section, template, len) == 0
20072 && (section[len] == 0 || section[len] == '.'));
20073}
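/* E.g. both ".sdata" and ".sdata.foo" match the ".sdata" template,
   while ".sdata2" does not (the character after the prefix must be
   '\0' or '.').  */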
20074
c1b7d95a 20075bool
3101faab 20076rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
20077{
20078 if (rs6000_sdata == SDATA_NONE)
20079 return false;
20080
7482ad25
AF
20081 /* We want to merge strings, so we never consider them small data. */
20082 if (TREE_CODE (decl) == STRING_CST)
20083 return false;
20084
20085 /* Functions are never in the small data area. */
20086 if (TREE_CODE (decl) == FUNCTION_DECL)
20087 return false;
20088
0e5dbd9b
DE
20089 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
20090 {
20091 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
20092 if (compare_section_name (section, ".sdata")
20093 || compare_section_name (section, ".sdata2")
20094 || compare_section_name (section, ".gnu.linkonce.s")
20095 || compare_section_name (section, ".sbss")
20096 || compare_section_name (section, ".sbss2")
20097 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
20098 || strcmp (section, ".PPC.EMB.sdata0") == 0
20099 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
20100 return true;
20101 }
20102 else
20103 {
20104 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
20105
20106 if (size > 0
307b599c 20107 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
20108 /* If it's not public, and we're not going to reference it there,
20109 there's no need to put it in the small data section. */
0e5dbd9b
DE
20110 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
20111 return true;
20112 }
20113
20114 return false;
20115}
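/* Hedged example, assuming the usual option spellings: with -G 8 a
   4-byte initialized "int x = 1;" passes the size test above and is
   placed in the small data area, where it can be addressed with a
   single 16-bit offset from the small-data base register instead of a
   two-instruction address setup.  */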
20116
b91da81f 20117#endif /* USING_ELFOS_H */
aacd3885
RS
20118\f
20119/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 20120
aacd3885 20121static bool
3101faab 20122rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
20123{
20124 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
20125}
a6c2a102 20126\f
000034eb 20127/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
20128 ADDR can be effectively incremented by incrementing REG.
20129
20130 r0 is special and we must not select it as an address
20131 register by this routine since our caller will try to
20132 increment the returned register via an "la" instruction. */
000034eb 20133
9390387d 20134rtx
a2369ed3 20135find_addr_reg (rtx addr)
000034eb
DE
20136{
20137 while (GET_CODE (addr) == PLUS)
20138 {
02441cd6
JL
20139 if (GET_CODE (XEXP (addr, 0)) == REG
20140 && REGNO (XEXP (addr, 0)) != 0)
000034eb 20141 addr = XEXP (addr, 0);
02441cd6
JL
20142 else if (GET_CODE (XEXP (addr, 1)) == REG
20143 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
20144 addr = XEXP (addr, 1);
20145 else if (CONSTANT_P (XEXP (addr, 0)))
20146 addr = XEXP (addr, 1);
20147 else if (CONSTANT_P (XEXP (addr, 1)))
20148 addr = XEXP (addr, 0);
20149 else
37409796 20150 gcc_unreachable ();
000034eb 20151 }
37409796
NS
20152 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
20153 return addr;
000034eb
DE
20154}
20155
a6c2a102 20156void
a2369ed3 20157rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
20158{
20159 fatal_insn ("bad address", op);
20160}
c8023011 20161
ee890fe2
SS
20162#if TARGET_MACHO
20163
efdba735 20164static tree branch_island_list = 0;
ee890fe2 20165
efdba735
SH
20166/* Remember to generate a branch island for far calls to the given
20167 function. */
ee890fe2 20168
f676971a 20169static void
c4ad648e
AM
20170add_compiler_branch_island (tree label_name, tree function_name,
20171 int line_number)
ee890fe2 20172{
efdba735 20173 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 20174 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
20175 TREE_CHAIN (branch_island) = branch_island_list;
20176 branch_island_list = branch_island;
ee890fe2
SS
20177}
20178
efdba735
SH
20179#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
20180#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
20181#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
20182 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 20183
efdba735
SH
20184/* Generate far-jump branch islands for everything on the
20185 branch_island_list. Invoked immediately after the last instruction
20186 of the epilogue has been emitted; the branch-islands must be
20187 appended to, and contiguous with, the function body. Mach-O stubs
20188 are generated in machopic_output_stub(). */
ee890fe2 20189
efdba735
SH
20190static void
20191macho_branch_islands (void)
20192{
20193 char tmp_buf[512];
20194 tree branch_island;
20195
20196 for (branch_island = branch_island_list;
20197 branch_island;
20198 branch_island = TREE_CHAIN (branch_island))
20199 {
20200 const char *label =
20201 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
20202 const char *name =
11abc112 20203 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
20204 char name_buf[512];
20205 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
20206 if (name[0] == '*' || name[0] == '&')
20207 strcpy (name_buf, name+1);
20208 else
20209 {
20210 name_buf[0] = '_';
20211 strcpy (name_buf+1, name);
20212 }
20213 strcpy (tmp_buf, "\n");
20214 strcat (tmp_buf, label);
ee890fe2 20215#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 20216 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 20217 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 20218#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
20219 if (flag_pic)
20220 {
20221 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
20222 strcat (tmp_buf, label);
20223 strcat (tmp_buf, "_pic\n");
20224 strcat (tmp_buf, label);
20225 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 20226
efdba735
SH
20227 strcat (tmp_buf, "\taddis r11,r11,ha16(");
20228 strcat (tmp_buf, name_buf);
20229 strcat (tmp_buf, " - ");
20230 strcat (tmp_buf, label);
20231 strcat (tmp_buf, "_pic)\n");
f676971a 20232
efdba735 20233 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 20234
efdba735
SH
20235 strcat (tmp_buf, "\taddi r12,r11,lo16(");
20236 strcat (tmp_buf, name_buf);
20237 strcat (tmp_buf, " - ");
20238 strcat (tmp_buf, label);
20239 strcat (tmp_buf, "_pic)\n");
f676971a 20240
efdba735
SH
20241 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
20242 }
20243 else
20244 {
20245 strcat (tmp_buf, ":\nlis r12,hi16(");
20246 strcat (tmp_buf, name_buf);
20247 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
20248 strcat (tmp_buf, name_buf);
20249 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
20250 }
20251 output_asm_insn (tmp_buf, 0);
ee890fe2 20252#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 20253 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 20254 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 20255#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 20256 }
ee890fe2 20257
efdba735 20258 branch_island_list = 0;
ee890fe2
SS
20259}
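/* For reference, a non-PIC island produced by the loop above has
   roughly this shape (assembled from the strings in tmp_buf):

     L42:
     lis r12,hi16(_foo)
     ori r12,r12,lo16(_foo)
     mtctr r12
     bctr

   where "_foo" stands for the Darwin-decorated callee and "L42" for the
   label recorded by add_compiler_branch_island; the PIC variant instead
   materializes the address relative to a "bcl 20,31" base.  */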
20260
 20261/* NO_PREVIOUS_DEF checks in the linked list whether the function name is
20262 already there or not. */
20263
efdba735 20264static int
a2369ed3 20265no_previous_def (tree function_name)
ee890fe2 20266{
efdba735
SH
20267 tree branch_island;
20268 for (branch_island = branch_island_list;
20269 branch_island;
20270 branch_island = TREE_CHAIN (branch_island))
20271 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
20272 return 0;
20273 return 1;
20274}
20275
20276/* GET_PREV_LABEL gets the label name from the previous definition of
20277 the function. */
20278
efdba735 20279static tree
a2369ed3 20280get_prev_label (tree function_name)
ee890fe2 20281{
efdba735
SH
20282 tree branch_island;
20283 for (branch_island = branch_island_list;
20284 branch_island;
20285 branch_island = TREE_CHAIN (branch_island))
20286 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
20287 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
20288 return 0;
20289}
20290
75b1b789
MS
20291#ifndef DARWIN_LINKER_GENERATES_ISLANDS
20292#define DARWIN_LINKER_GENERATES_ISLANDS 0
20293#endif
20294
20295/* KEXTs still need branch islands. */
20296#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
20297 || flag_mkernel || flag_apple_kext)
20298
ee890fe2 20299/* INSN is either a function call or a millicode call. It may have an
f676971a 20300 unconditional jump in its delay slot.
ee890fe2
SS
20301
20302 CALL_DEST is the routine we are calling. */
20303
20304char *
c4ad648e
AM
20305output_call (rtx insn, rtx *operands, int dest_operand_number,
20306 int cookie_operand_number)
ee890fe2
SS
20307{
20308 static char buf[256];
75b1b789
MS
20309 if (DARWIN_GENERATE_ISLANDS
20310 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 20311 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
20312 {
20313 tree labelname;
efdba735 20314 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 20315
ee890fe2
SS
20316 if (no_previous_def (funname))
20317 {
ee890fe2
SS
20318 rtx label_rtx = gen_label_rtx ();
20319 char *label_buf, temp_buf[256];
20320 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
20321 CODE_LABEL_NUMBER (label_rtx));
20322 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
20323 labelname = get_identifier (label_buf);
a38e7aa5 20324 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
20325 }
20326 else
20327 labelname = get_prev_label (funname);
20328
efdba735
SH
20329 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
20330 instruction will reach 'foo', otherwise link as 'bl L42'".
20331 "L42" should be a 'branch island', that will do a far jump to
20332 'foo'. Branch islands are generated in
20333 macho_branch_islands(). */
ee890fe2 20334 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 20335 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
20336 }
20337 else
efdba735
SH
20338 sprintf (buf, "bl %%z%d", dest_operand_number);
20339 return buf;
ee890fe2
SS
20340}
20341
ee890fe2
SS
20342/* Generate PIC and indirect symbol stubs. */
20343
20344void
a2369ed3 20345machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
20346{
20347 unsigned int length;
a4f6c312
SS
20348 char *symbol_name, *lazy_ptr_name;
20349 char *local_label_0;
ee890fe2
SS
20350 static int label = 0;
20351
df56a27f 20352 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 20353 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 20354
ee890fe2 20355
ee890fe2
SS
20356 length = strlen (symb);
20357 symbol_name = alloca (length + 32);
20358 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
20359
20360 lazy_ptr_name = alloca (length + 32);
20361 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
20362
ee890fe2 20363 if (flag_pic == 2)
56c779bc 20364 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 20365 else
56c779bc 20366 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
20367
20368 if (flag_pic == 2)
20369 {
d974312d
DJ
20370 fprintf (file, "\t.align 5\n");
20371
20372 fprintf (file, "%s:\n", stub);
20373 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20374
876455fa 20375 label++;
89da1f32 20376 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
876455fa 20377 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 20378
ee890fe2
SS
20379 fprintf (file, "\tmflr r0\n");
20380 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
20381 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
20382 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
20383 lazy_ptr_name, local_label_0);
20384 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
20385 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
20386 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
20387 lazy_ptr_name, local_label_0);
20388 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
20389 fprintf (file, "\tbctr\n");
20390 }
20391 else
d974312d
DJ
20392 {
20393 fprintf (file, "\t.align 4\n");
20394
20395 fprintf (file, "%s:\n", stub);
20396 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20397
20398 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
20399 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
20400 (TARGET_64BIT ? "ldu" : "lwzu"),
20401 lazy_ptr_name);
d974312d
DJ
20402 fprintf (file, "\tmtctr r12\n");
20403 fprintf (file, "\tbctr\n");
20404 }
f676971a 20405
56c779bc 20406 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
20407 fprintf (file, "%s:\n", lazy_ptr_name);
20408 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
20409 fprintf (file, "%sdyld_stub_binding_helper\n",
20410 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
20411}
20412
20413/* Legitimize PIC addresses. If the address is already
20414 position-independent, we return ORIG. Newly generated
20415 position-independent addresses go into a reg. This is REG if non
20416 zero, otherwise we allocate register(s) as necessary. */
20417
4fbbe694 20418#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
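/* I.e. the offset fits in a signed 16-bit immediate, -32768 .. 32767.  */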
ee890fe2
SS
20419
20420rtx
f676971a 20421rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 20422 rtx reg)
ee890fe2
SS
20423{
20424 rtx base, offset;
20425
20426 if (reg == NULL && ! reload_in_progress && ! reload_completed)
20427 reg = gen_reg_rtx (Pmode);
20428
20429 if (GET_CODE (orig) == CONST)
20430 {
37409796
NS
20431 rtx reg_temp;
20432
ee890fe2
SS
20433 if (GET_CODE (XEXP (orig, 0)) == PLUS
20434 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
20435 return orig;
20436
37409796 20437 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 20438
37409796
NS
20439 /* Use a different reg for the intermediate value, as
20440 it will be marked UNCHANGING. */
b3a13419 20441 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
20442 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
20443 Pmode, reg_temp);
20444 offset =
20445 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
20446 Pmode, reg);
bb8df8a6 20447
ee890fe2
SS
20448 if (GET_CODE (offset) == CONST_INT)
20449 {
20450 if (SMALL_INT (offset))
ed8908e7 20451 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
20452 else if (! reload_in_progress && ! reload_completed)
20453 offset = force_reg (Pmode, offset);
20454 else
c859cda6
DJ
20455 {
20456 rtx mem = force_const_mem (Pmode, orig);
20457 return machopic_legitimize_pic_address (mem, Pmode, reg);
20458 }
ee890fe2 20459 }
f1c25d3b 20460 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
20461 }
20462
20463 /* Fall back on generic machopic code. */
20464 return machopic_legitimize_pic_address (orig, mode, reg);
20465}
20466
c4e18b1c
GK
20467/* Output a .machine directive for the Darwin assembler, and call
20468 the generic start_file routine. */
20469
20470static void
20471rs6000_darwin_file_start (void)
20472{
94ff898d 20473 static const struct
c4e18b1c
GK
20474 {
20475 const char *arg;
20476 const char *name;
20477 int if_set;
20478 } mapping[] = {
55dbfb48 20479 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
20480 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
20481 { "power4", "ppc970", 0 },
20482 { "G5", "ppc970", 0 },
20483 { "7450", "ppc7450", 0 },
20484 { "7400", "ppc7400", MASK_ALTIVEC },
20485 { "G4", "ppc7400", 0 },
20486 { "750", "ppc750", 0 },
20487 { "740", "ppc750", 0 },
20488 { "G3", "ppc750", 0 },
20489 { "604e", "ppc604e", 0 },
20490 { "604", "ppc604", 0 },
20491 { "603e", "ppc603", 0 },
20492 { "603", "ppc603", 0 },
20493 { "601", "ppc601", 0 },
20494 { NULL, "ppc", 0 } };
20495 const char *cpu_id = "";
20496 size_t i;
94ff898d 20497
9390387d 20498 rs6000_file_start ();
192d0f89 20499 darwin_file_start ();
c4e18b1c
GK
20500
20501 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
20502 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
20503 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
20504 && rs6000_select[i].string[0] != '\0')
20505 cpu_id = rs6000_select[i].string;
20506
20507 /* Look through the mapping array. Pick the first name that either
20508 matches the argument, has a bit set in IF_SET that is also set
20509 in the target flags, or has a NULL name. */
20510
20511 i = 0;
20512 while (mapping[i].arg != NULL
20513 && strcmp (mapping[i].arg, cpu_id) != 0
20514 && (mapping[i].if_set & target_flags) == 0)
20515 i++;
20516
20517 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
20518}
20519
ee890fe2 20520#endif /* TARGET_MACHO */
7c262518
RH
20521
20522#if TARGET_ELF
9b580a0b
RH
20523static int
20524rs6000_elf_reloc_rw_mask (void)
7c262518 20525{
9b580a0b
RH
20526 if (flag_pic)
20527 return 3;
20528 else if (DEFAULT_ABI == ABI_AIX)
20529 return 2;
20530 else
20531 return 0;
7c262518 20532}
d9f6800d
RH
20533
20534/* Record an element in the table of global constructors. SYMBOL is
20535 a SYMBOL_REF of the function to be called; PRIORITY is a number
20536 between 0 and MAX_INIT_PRIORITY.
20537
20538 This differs from default_named_section_asm_out_constructor in
20539 that we have special handling for -mrelocatable. */
20540
20541static void
a2369ed3 20542rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
20543{
20544 const char *section = ".ctors";
20545 char buf[16];
20546
20547 if (priority != DEFAULT_INIT_PRIORITY)
20548 {
20549 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
20550 /* Invert the numbering so the linker puts us in the proper
20551 order; constructors are run from right to left, and the
20552 linker sorts in increasing order. */
20553 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20554 section = buf;
20555 }
20556
d6b5193b 20557 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20558 assemble_align (POINTER_SIZE);
d9f6800d
RH
20559
20560 if (TARGET_RELOCATABLE)
20561 {
20562 fputs ("\t.long (", asm_out_file);
20563 output_addr_const (asm_out_file, symbol);
20564 fputs (")@fixup\n", asm_out_file);
20565 }
20566 else
c8af3574 20567 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
20568}
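/* Worked example of the priority handling above, assuming the usual
   MAX_INIT_PRIORITY of 65535: a constructor with priority 65400 is
   emitted into ".ctors.00135" (65535 - 65400), so the linker's lexical
   sort of the numbered sections produces the inverted ordering the
   comment describes.  */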
20569
20570static void
a2369ed3 20571rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
20572{
20573 const char *section = ".dtors";
20574 char buf[16];
20575
20576 if (priority != DEFAULT_INIT_PRIORITY)
20577 {
20578 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
20579 /* Invert the numbering so the linker puts us in the proper
20580 order; constructors are run from right to left, and the
20581 linker sorts in increasing order. */
20582 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20583 section = buf;
20584 }
20585
d6b5193b 20586 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20587 assemble_align (POINTER_SIZE);
d9f6800d
RH
20588
20589 if (TARGET_RELOCATABLE)
20590 {
20591 fputs ("\t.long (", asm_out_file);
20592 output_addr_const (asm_out_file, symbol);
20593 fputs (")@fixup\n", asm_out_file);
20594 }
20595 else
c8af3574 20596 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 20597}
9739c90c
JJ
20598
20599void
a2369ed3 20600rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
20601{
20602 if (TARGET_64BIT)
20603 {
20604 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
20605 ASM_OUTPUT_LABEL (file, name);
20606 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
20607 rs6000_output_function_entry (file, name);
20608 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
20609 if (DOT_SYMBOLS)
9739c90c 20610 {
85b776df 20611 fputs ("\t.size\t", file);
9739c90c 20612 assemble_name (file, name);
85b776df
AM
20613 fputs (",24\n\t.type\t.", file);
20614 assemble_name (file, name);
20615 fputs (",@function\n", file);
20616 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
20617 {
20618 fputs ("\t.globl\t.", file);
20619 assemble_name (file, name);
20620 putc ('\n', file);
20621 }
9739c90c 20622 }
85b776df
AM
20623 else
20624 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 20625 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
20626 rs6000_output_function_entry (file, name);
20627 fputs (":\n", file);
9739c90c
JJ
20628 return;
20629 }
20630
20631 if (TARGET_RELOCATABLE
7f970b70 20632 && !TARGET_SECURE_PLT
9739c90c 20633 && (get_pool_size () != 0 || current_function_profile)
3c9eb5f4 20634 && uses_TOC ())
9739c90c
JJ
20635 {
20636 char buf[256];
20637
20638 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
20639
20640 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
20641 fprintf (file, "\t.long ");
20642 assemble_name (file, buf);
20643 putc ('-', file);
20644 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
20645 assemble_name (file, buf);
20646 putc ('\n', file);
20647 }
20648
20649 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
20650 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
20651
20652 if (DEFAULT_ABI == ABI_AIX)
20653 {
20654 const char *desc_name, *orig_name;
20655
20656 orig_name = (*targetm.strip_name_encoding) (name);
20657 desc_name = orig_name;
20658 while (*desc_name == '.')
20659 desc_name++;
20660
20661 if (TREE_PUBLIC (decl))
20662 fprintf (file, "\t.globl %s\n", desc_name);
20663
20664 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20665 fprintf (file, "%s:\n", desc_name);
20666 fprintf (file, "\t.long %s\n", orig_name);
20667 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
20668 if (DEFAULT_ABI == ABI_AIX)
20669 fputs ("\t.long 0\n", file);
20670 fprintf (file, "\t.previous\n");
20671 }
20672 ASM_OUTPUT_LABEL (file, name);
20673}
1334b570
AM
20674
20675static void
20676rs6000_elf_end_indicate_exec_stack (void)
20677{
20678 if (TARGET_32BIT)
20679 file_end_indicate_exec_stack ();
20680}
7c262518
RH
20681#endif
20682
cbaaba19 20683#if TARGET_XCOFF
0d5817b2
DE
20684static void
20685rs6000_xcoff_asm_output_anchor (rtx symbol)
20686{
20687 char buffer[100];
20688
20689 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
20690 SYMBOL_REF_BLOCK_OFFSET (symbol));
20691 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
20692}
20693
7c262518 20694static void
a2369ed3 20695rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
20696{
20697 fputs (GLOBAL_ASM_OP, stream);
20698 RS6000_OUTPUT_BASENAME (stream, name);
20699 putc ('\n', stream);
20700}
20701
d6b5193b
RS
 20702/* A get_unnamed_section callback, used for read-only sections. PTR
20703 points to the section string variable. */
20704
20705static void
20706rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
20707{
890f9edf
OH
20708 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
20709 *(const char *const *) directive,
20710 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20711}
20712
20713/* Likewise for read-write sections. */
20714
20715static void
20716rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
20717{
890f9edf
OH
20718 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
20719 *(const char *const *) directive,
20720 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20721}
20722
20723/* A get_unnamed_section callback, used for switching to toc_section. */
20724
20725static void
20726rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20727{
20728 if (TARGET_MINIMAL_TOC)
20729 {
20730 /* toc_section is always selected at least once from
20731 rs6000_xcoff_file_start, so this is guaranteed to
20732 always be defined once and only once in each file. */
20733 if (!toc_initialized)
20734 {
20735 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
20736 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
20737 toc_initialized = 1;
20738 }
20739 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
20740 (TARGET_32BIT ? "" : ",3"));
20741 }
20742 else
20743 fputs ("\t.toc\n", asm_out_file);
20744}
20745
20746/* Implement TARGET_ASM_INIT_SECTIONS. */
20747
20748static void
20749rs6000_xcoff_asm_init_sections (void)
20750{
20751 read_only_data_section
20752 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20753 &xcoff_read_only_section_name);
20754
20755 private_data_section
20756 = get_unnamed_section (SECTION_WRITE,
20757 rs6000_xcoff_output_readwrite_section_asm_op,
20758 &xcoff_private_data_section_name);
20759
20760 read_only_private_data_section
20761 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20762 &xcoff_private_data_section_name);
20763
20764 toc_section
20765 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
20766
20767 readonly_data_section = read_only_data_section;
20768 exception_section = data_section;
20769}
20770
9b580a0b
RH
20771static int
20772rs6000_xcoff_reloc_rw_mask (void)
20773{
20774 return 3;
20775}
20776
b275d088 20777static void
c18a5b6c
MM
20778rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
20779 tree decl ATTRIBUTE_UNUSED)
7c262518 20780{
0e5dbd9b
DE
20781 int smclass;
20782 static const char * const suffix[3] = { "PR", "RO", "RW" };
20783
20784 if (flags & SECTION_CODE)
20785 smclass = 0;
20786 else if (flags & SECTION_WRITE)
20787 smclass = 2;
20788 else
20789 smclass = 1;
20790
5b5198f7 20791 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 20792 (flags & SECTION_CODE) ? "." : "",
5b5198f7 20793 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 20794}
ae46c4e0 20795
d6b5193b 20796static section *
f676971a 20797rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 20798 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 20799{
9b580a0b 20800 if (decl_readonly_section (decl, reloc))
ae46c4e0 20801 {
0e5dbd9b 20802 if (TREE_PUBLIC (decl))
d6b5193b 20803 return read_only_data_section;
ae46c4e0 20804 else
d6b5193b 20805 return read_only_private_data_section;
ae46c4e0
RH
20806 }
20807 else
20808 {
0e5dbd9b 20809 if (TREE_PUBLIC (decl))
d6b5193b 20810 return data_section;
ae46c4e0 20811 else
d6b5193b 20812 return private_data_section;
ae46c4e0
RH
20813 }
20814}
20815
20816static void
a2369ed3 20817rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
20818{
20819 const char *name;
ae46c4e0 20820
5b5198f7
DE
20821 /* Use select_section for private and uninitialized data. */
20822 if (!TREE_PUBLIC (decl)
20823 || DECL_COMMON (decl)
0e5dbd9b
DE
20824 || DECL_INITIAL (decl) == NULL_TREE
20825 || DECL_INITIAL (decl) == error_mark_node
20826 || (flag_zero_initialized_in_bss
20827 && initializer_zerop (DECL_INITIAL (decl))))
20828 return;
20829
20830 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
20831 name = (*targetm.strip_name_encoding) (name);
20832 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 20833}
b64a1b53 20834
fb49053f
RH
20835/* Select section for constant in constant pool.
20836
20837 On RS/6000, all constants are in the private read-only data area.
20838 However, if this is being placed in the TOC it must be output as a
20839 toc entry. */
20840
d6b5193b 20841static section *
f676971a 20842rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 20843 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
20844{
20845 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20846 return toc_section;
b64a1b53 20847 else
d6b5193b 20848 return read_only_private_data_section;
b64a1b53 20849}
772c5265
RH
20850
20851/* Remove any trailing [DS] or the like from the symbol name. */
20852
20853static const char *
a2369ed3 20854rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
20855{
20856 size_t len;
20857 if (*name == '*')
20858 name++;
20859 len = strlen (name);
20860 if (name[len - 1] == ']')
20861 return ggc_alloc_string (name, len - 4);
20862 else
20863 return name;
20864}
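/* E.g. "foo[DS]" becomes "foo"; a leading '*' is dropped first.  */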
20865
5add3202
DE
20866/* Section attributes. AIX is always PIC. */
20867
20868static unsigned int
a2369ed3 20869rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 20870{
5b5198f7 20871 unsigned int align;
9b580a0b 20872 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
20873
20874 /* Align to at least UNIT size. */
20875 if (flags & SECTION_CODE)
20876 align = MIN_UNITS_PER_WORD;
20877 else
20878 /* Increase alignment of large objects if not already stricter. */
20879 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
20880 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
20881 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
20882
20883 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 20884}
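/* The log2 of the alignment chosen above is stored in the
   SECTION_ENTSIZE bits and printed back by
   rs6000_xcoff_asm_named_section as the .csect alignment operand,
   e.g. a 16-byte-aligned csect ends up as "...,4".  */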
a5fe455b 20885
1bc7c5b6
ZW
20886/* Output at beginning of assembler file.
20887
20888 Initialize the section names for the RS/6000 at this point.
20889
20890 Specify filename, including full path, to assembler.
20891
20892 We want to go into the TOC section so at least one .toc will be emitted.
20893 Also, in order to output proper .bs/.es pairs, we need at least one static
20894 [RW] section emitted.
20895
20896 Finally, declare mcount when profiling to make the assembler happy. */
20897
20898static void
863d938c 20899rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
20900{
20901 rs6000_gen_section_name (&xcoff_bss_section_name,
20902 main_input_filename, ".bss_");
20903 rs6000_gen_section_name (&xcoff_private_data_section_name,
20904 main_input_filename, ".rw_");
20905 rs6000_gen_section_name (&xcoff_read_only_section_name,
20906 main_input_filename, ".ro_");
20907
20908 fputs ("\t.file\t", asm_out_file);
20909 output_quoted_string (asm_out_file, main_input_filename);
20910 fputc ('\n', asm_out_file);
1bc7c5b6 20911 if (write_symbols != NO_DEBUG)
d6b5193b
RS
20912 switch_to_section (private_data_section);
20913 switch_to_section (text_section);
1bc7c5b6
ZW
20914 if (profile_flag)
20915 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
20916 rs6000_file_start ();
20917}
20918
a5fe455b
ZW
20919/* Output at end of assembler file.
20920 On the RS/6000, referencing data should automatically pull in text. */
20921
20922static void
863d938c 20923rs6000_xcoff_file_end (void)
a5fe455b 20924{
d6b5193b 20925 switch_to_section (text_section);
a5fe455b 20926 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 20927 switch_to_section (data_section);
a5fe455b
ZW
20928 fputs (TARGET_32BIT
20929 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
20930 asm_out_file);
20931}
f1384257 20932#endif /* TARGET_XCOFF */
0e5dbd9b 20933
3c50106f
RH
20934/* Compute a (partial) cost for rtx X. Return true if the complete
20935 cost has been computed, and false if subexpressions should be
20936 scanned. In either case, *TOTAL contains the cost result. */
20937
20938static bool
1494c534 20939rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 20940{
f0517163
RS
20941 enum machine_mode mode = GET_MODE (x);
20942
3c50106f
RH
20943 switch (code)
20944 {
30a555d9 20945 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 20946 case CONST_INT:
066cd967
DE
20947 if (((outer_code == SET
20948 || outer_code == PLUS
20949 || outer_code == MINUS)
279bb624
DE
20950 && (satisfies_constraint_I (x)
20951 || satisfies_constraint_L (x)))
066cd967 20952 || (outer_code == AND
279bb624
DE
20953 && (satisfies_constraint_K (x)
20954 || (mode == SImode
20955 ? satisfies_constraint_L (x)
20956 : satisfies_constraint_J (x))
1990cd79
AM
20957 || mask_operand (x, mode)
20958 || (mode == DImode
20959 && mask64_operand (x, DImode))))
22e54023 20960 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
20961 && (satisfies_constraint_K (x)
20962 || (mode == SImode
20963 ? satisfies_constraint_L (x)
20964 : satisfies_constraint_J (x))))
066cd967
DE
20965 || outer_code == ASHIFT
20966 || outer_code == ASHIFTRT
20967 || outer_code == LSHIFTRT
20968 || outer_code == ROTATE
20969 || outer_code == ROTATERT
d5861a7a 20970 || outer_code == ZERO_EXTRACT
066cd967 20971 || (outer_code == MULT
279bb624 20972 && satisfies_constraint_I (x))
22e54023
DE
20973 || ((outer_code == DIV || outer_code == UDIV
20974 || outer_code == MOD || outer_code == UMOD)
20975 && exact_log2 (INTVAL (x)) >= 0)
066cd967 20976 || (outer_code == COMPARE
279bb624
DE
20977 && (satisfies_constraint_I (x)
20978 || satisfies_constraint_K (x)))
22e54023 20979 || (outer_code == EQ
279bb624
DE
20980 && (satisfies_constraint_I (x)
20981 || satisfies_constraint_K (x)
20982 || (mode == SImode
20983 ? satisfies_constraint_L (x)
20984 : satisfies_constraint_J (x))))
22e54023 20985 || (outer_code == GTU
279bb624 20986 && satisfies_constraint_I (x))
22e54023 20987 || (outer_code == LTU
279bb624 20988 && satisfies_constraint_P (x)))
066cd967
DE
20989 {
20990 *total = 0;
20991 return true;
20992 }
20993 else if ((outer_code == PLUS
4ae234b0 20994 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 20995 || (outer_code == MINUS
4ae234b0 20996 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
20997 || ((outer_code == SET
20998 || outer_code == IOR
20999 || outer_code == XOR)
21000 && (INTVAL (x)
21001 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
21002 {
21003 *total = COSTS_N_INSNS (1);
21004 return true;
21005 }
21006 /* FALLTHRU */
21007
21008 case CONST_DOUBLE:
f6fe3a22 21009 if (mode == DImode && code == CONST_DOUBLE)
066cd967 21010 {
f6fe3a22
DE
21011 if ((outer_code == IOR || outer_code == XOR)
21012 && CONST_DOUBLE_HIGH (x) == 0
21013 && (CONST_DOUBLE_LOW (x)
21014 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
21015 {
21016 *total = 0;
21017 return true;
21018 }
21019 else if ((outer_code == AND && and64_2_operand (x, DImode))
21020 || ((outer_code == SET
21021 || outer_code == IOR
21022 || outer_code == XOR)
21023 && CONST_DOUBLE_HIGH (x) == 0))
21024 {
21025 *total = COSTS_N_INSNS (1);
21026 return true;
21027 }
066cd967
DE
21028 }
21029 /* FALLTHRU */
21030
3c50106f 21031 case CONST:
066cd967 21032 case HIGH:
3c50106f 21033 case SYMBOL_REF:
066cd967
DE
21034 case MEM:
21035 /* When optimizing for size, MEM should be slightly more expensive
 21036 than generating an address, e.g., (plus (reg) (const)).
c112cf2b 21037 L1 cache latency is about two instructions. */
066cd967 21038 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
21039 return true;
21040
30a555d9
DE
21041 case LABEL_REF:
21042 *total = 0;
21043 return true;
21044
3c50106f 21045 case PLUS:
f0517163 21046 if (mode == DFmode)
066cd967
DE
21047 {
21048 if (GET_CODE (XEXP (x, 0)) == MULT)
21049 {
21050 /* FNMA accounted in outer NEG. */
21051 if (outer_code == NEG)
21052 *total = rs6000_cost->dmul - rs6000_cost->fp;
21053 else
21054 *total = rs6000_cost->dmul;
21055 }
21056 else
21057 *total = rs6000_cost->fp;
21058 }
f0517163 21059 else if (mode == SFmode)
066cd967
DE
21060 {
21061 /* FNMA accounted in outer NEG. */
21062 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21063 *total = 0;
21064 else
21065 *total = rs6000_cost->fp;
21066 }
f0517163 21067 else
066cd967
DE
21068 *total = COSTS_N_INSNS (1);
21069 return false;
3c50106f 21070
52190329 21071 case MINUS:
f0517163 21072 if (mode == DFmode)
066cd967 21073 {
762c919f
JM
21074 if (GET_CODE (XEXP (x, 0)) == MULT
21075 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
21076 {
21077 /* FNMA accounted in outer NEG. */
21078 if (outer_code == NEG)
762c919f 21079 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
21080 else
21081 *total = rs6000_cost->dmul;
21082 }
21083 else
21084 *total = rs6000_cost->fp;
21085 }
f0517163 21086 else if (mode == SFmode)
066cd967
DE
21087 {
21088 /* FNMA accounted in outer NEG. */
21089 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21090 *total = 0;
21091 else
21092 *total = rs6000_cost->fp;
21093 }
f0517163 21094 else
c4ad648e 21095 *total = COSTS_N_INSNS (1);
066cd967 21096 return false;
3c50106f
RH
21097
21098 case MULT:
c9dbf840 21099 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 21100 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 21101 {
8b897cfa
RS
21102 if (INTVAL (XEXP (x, 1)) >= -256
21103 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 21104 *total = rs6000_cost->mulsi_const9;
8b897cfa 21105 else
06a67bdd 21106 *total = rs6000_cost->mulsi_const;
3c50106f 21107 }
066cd967
DE
21108 /* FMA accounted in outer PLUS/MINUS. */
21109 else if ((mode == DFmode || mode == SFmode)
21110 && (outer_code == PLUS || outer_code == MINUS))
21111 *total = 0;
f0517163 21112 else if (mode == DFmode)
06a67bdd 21113 *total = rs6000_cost->dmul;
f0517163 21114 else if (mode == SFmode)
06a67bdd 21115 *total = rs6000_cost->fp;
f0517163 21116 else if (mode == DImode)
06a67bdd 21117 *total = rs6000_cost->muldi;
8b897cfa 21118 else
06a67bdd 21119 *total = rs6000_cost->mulsi;
066cd967 21120 return false;
3c50106f
RH
21121
21122 case DIV:
21123 case MOD:
f0517163
RS
21124 if (FLOAT_MODE_P (mode))
21125 {
06a67bdd
RS
21126 *total = mode == DFmode ? rs6000_cost->ddiv
21127 : rs6000_cost->sdiv;
066cd967 21128 return false;
f0517163 21129 }
5efb1046 21130 /* FALLTHRU */
3c50106f
RH
21131
21132 case UDIV:
21133 case UMOD:
627b6fe2
DJ
21134 if (GET_CODE (XEXP (x, 1)) == CONST_INT
21135 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
21136 {
21137 if (code == DIV || code == MOD)
21138 /* Shift, addze */
21139 *total = COSTS_N_INSNS (2);
21140 else
21141 /* Shift */
21142 *total = COSTS_N_INSNS (1);
21143 }
c4ad648e 21144 else
627b6fe2
DJ
21145 {
21146 if (GET_MODE (XEXP (x, 1)) == DImode)
21147 *total = rs6000_cost->divdi;
21148 else
21149 *total = rs6000_cost->divsi;
21150 }
21151 /* Add in shift and subtract for MOD. */
21152 if (code == MOD || code == UMOD)
21153 *total += COSTS_N_INSNS (2);
066cd967 21154 return false;
3c50106f 21155
32f56aad 21156 case CTZ:
3c50106f
RH
21157 case FFS:
21158 *total = COSTS_N_INSNS (4);
066cd967 21159 return false;
3c50106f 21160
32f56aad
DE
21161 case POPCOUNT:
21162 *total = COSTS_N_INSNS (6);
21163 return false;
21164
06a67bdd 21165 case NOT:
066cd967
DE
21166 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
21167 {
21168 *total = 0;
21169 return false;
21170 }
21171 /* FALLTHRU */
21172
21173 case AND:
32f56aad 21174 case CLZ:
066cd967
DE
21175 case IOR:
21176 case XOR:
d5861a7a
DE
21177 case ZERO_EXTRACT:
21178 *total = COSTS_N_INSNS (1);
21179 return false;
21180
066cd967
DE
21181 case ASHIFT:
21182 case ASHIFTRT:
21183 case LSHIFTRT:
21184 case ROTATE:
21185 case ROTATERT:
d5861a7a 21186 /* Handle mul_highpart. */
066cd967
DE
21187 if (outer_code == TRUNCATE
21188 && GET_CODE (XEXP (x, 0)) == MULT)
21189 {
21190 if (mode == DImode)
21191 *total = rs6000_cost->muldi;
21192 else
21193 *total = rs6000_cost->mulsi;
21194 return true;
21195 }
d5861a7a
DE
21196 else if (outer_code == AND)
21197 *total = 0;
21198 else
21199 *total = COSTS_N_INSNS (1);
21200 return false;
21201
21202 case SIGN_EXTEND:
21203 case ZERO_EXTEND:
21204 if (GET_CODE (XEXP (x, 0)) == MEM)
21205 *total = 0;
21206 else
21207 *total = COSTS_N_INSNS (1);
066cd967 21208 return false;
06a67bdd 21209
066cd967
DE
21210 case COMPARE:
21211 case NEG:
21212 case ABS:
21213 if (!FLOAT_MODE_P (mode))
21214 {
21215 *total = COSTS_N_INSNS (1);
21216 return false;
21217 }
21218 /* FALLTHRU */
21219
21220 case FLOAT:
21221 case UNSIGNED_FLOAT:
21222 case FIX:
21223 case UNSIGNED_FIX:
06a67bdd
RS
21224 case FLOAT_TRUNCATE:
21225 *total = rs6000_cost->fp;
066cd967 21226 return false;
06a67bdd 21227
a2af5043
DJ
21228 case FLOAT_EXTEND:
21229 if (mode == DFmode)
21230 *total = 0;
21231 else
21232 *total = rs6000_cost->fp;
21233 return false;
21234
06a67bdd
RS
21235 case UNSPEC:
21236 switch (XINT (x, 1))
21237 {
21238 case UNSPEC_FRSP:
21239 *total = rs6000_cost->fp;
21240 return true;
21241
21242 default:
21243 break;
21244 }
21245 break;
21246
21247 case CALL:
21248 case IF_THEN_ELSE:
21249 if (optimize_size)
21250 {
21251 *total = COSTS_N_INSNS (1);
21252 return true;
21253 }
066cd967
DE
21254 else if (FLOAT_MODE_P (mode)
21255 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
21256 {
21257 *total = rs6000_cost->fp;
21258 return false;
21259 }
06a67bdd
RS
21260 break;
21261
c0600ecd
DE
21262 case EQ:
21263 case GTU:
21264 case LTU:
22e54023
DE
21265 /* Carry bit requires mode == Pmode.
21266 NEG or PLUS already counted so only add one. */
21267 if (mode == Pmode
21268 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 21269 {
22e54023
DE
21270 *total = COSTS_N_INSNS (1);
21271 return true;
21272 }
21273 if (outer_code == SET)
21274 {
21275 if (XEXP (x, 1) == const0_rtx)
c0600ecd 21276 {
22e54023 21277 *total = COSTS_N_INSNS (2);
c0600ecd 21278 return true;
c0600ecd 21279 }
22e54023
DE
21280 else if (mode == Pmode)
21281 {
21282 *total = COSTS_N_INSNS (3);
21283 return false;
21284 }
21285 }
21286 /* FALLTHRU */
21287
21288 case GT:
21289 case LT:
21290 case UNORDERED:
21291 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
21292 {
21293 *total = COSTS_N_INSNS (2);
21294 return true;
c0600ecd 21295 }
22e54023
DE
21296 /* CC COMPARE. */
21297 if (outer_code == COMPARE)
21298 {
21299 *total = 0;
21300 return true;
21301 }
21302 break;
c0600ecd 21303
3c50106f 21304 default:
06a67bdd 21305 break;
3c50106f 21306 }
06a67bdd
RS
21307
21308 return false;
3c50106f
RH
21309}
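/* Worked example of the CONST_INT handling above: in
   (plus (reg) (const_int 12)) the constant satisfies the `I'
   constraint (a signed 16-bit immediate), so it is costed at 0 because
   it folds into a single addi; a wider addend matched by
   reg_or_add_cint_operand is charged COSTS_N_INSNS (1) instead.  */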
21310
34bb030a
DE
 21311/* A C expression returning the cost of moving data from a register of class
 21312 FROM to one of class TO. */
21313
21314int
f676971a 21315rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 21316 enum reg_class from, enum reg_class to)
34bb030a
DE
21317{
21318 /* Moves from/to GENERAL_REGS. */
21319 if (reg_classes_intersect_p (to, GENERAL_REGS)
21320 || reg_classes_intersect_p (from, GENERAL_REGS))
21321 {
21322 if (! reg_classes_intersect_p (to, GENERAL_REGS))
21323 from = to;
21324
21325 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
21326 return (rs6000_memory_move_cost (mode, from, 0)
21327 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
21328
c4ad648e
AM
21329 /* It's more expensive to move CR_REGS than CR0_REGS because of the
21330 shift. */
34bb030a
DE
21331 else if (from == CR_REGS)
21332 return 4;
21333
aafc759a
PH
21334 /* Power6 has slower LR/CTR moves so make them more expensive than
 21335 memory in order to bias spills to memory. */
21336 else if (rs6000_cpu == PROCESSOR_POWER6
21337 && reg_classes_intersect_p (from, LINK_OR_CTR_REGS))
21338 return 6 * hard_regno_nregs[0][mode];
21339
34bb030a 21340 else
c4ad648e 21341 /* A move will cost one instruction per GPR moved. */
c8b622ff 21342 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
21343 }
21344
c4ad648e 21345 /* Moving between two similar registers is just one instruction. */
34bb030a 21346 else if (reg_classes_intersect_p (to, from))
7393f7f8 21347 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 21348
c4ad648e 21349 /* Everything else has to go through GENERAL_REGS. */
34bb030a 21350 else
f676971a 21351 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
21352 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
21353}
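/* Worked example of the model above: there is no direct GPR<->FPR copy
   on these processors, so such a move is priced as a store plus a load
   (the two rs6000_memory_move_cost terms); a plain SImode GPR<->GPR
   copy costs 2, one instruction per GPR moved.  */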
21354
 21355/* A C expression returning the cost of moving data of MODE from a register to
21356 or from memory. */
21357
21358int
f676971a 21359rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
a2369ed3 21360 int in ATTRIBUTE_UNUSED)
34bb030a
DE
21361{
21362 if (reg_classes_intersect_p (class, GENERAL_REGS))
c8b622ff 21363 return 4 * hard_regno_nregs[0][mode];
34bb030a 21364 else if (reg_classes_intersect_p (class, FLOAT_REGS))
c8b622ff 21365 return 4 * hard_regno_nregs[32][mode];
34bb030a 21366 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
c8b622ff 21367 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a
DE
21368 else
21369 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
21370}
21371
9c78b944
DE
 21372/* Return the decl of a target-specific builtin that implements the
 21373 reciprocal of the given function, or NULL_TREE if not available. */
21374
21375static tree
21376rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
21377 bool sqrt ATTRIBUTE_UNUSED)
21378{
21379 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
21380 && flag_finite_math_only && !flag_trapping_math
21381 && flag_unsafe_math_optimizations))
21382 return NULL_TREE;
21383
21384 if (md_fn)
21385 return NULL_TREE;
21386 else
21387 switch (fn)
21388 {
21389 case BUILT_IN_SQRTF:
21390 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
21391
21392 default:
21393 return NULL_TREE;
21394 }
21395}
21396
ef765ea9
DE
21397/* Newton-Raphson approximation of single-precision floating point divide n/d.
21398 Assumes no trapping math and finite arguments. */
21399
21400void
9c78b944 21401rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21402{
21403 rtx x0, e0, e1, y1, u0, v0, one;
21404
21405 x0 = gen_reg_rtx (SFmode);
21406 e0 = gen_reg_rtx (SFmode);
21407 e1 = gen_reg_rtx (SFmode);
21408 y1 = gen_reg_rtx (SFmode);
21409 u0 = gen_reg_rtx (SFmode);
21410 v0 = gen_reg_rtx (SFmode);
21411 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21412
21413 /* x0 = 1./d estimate */
21414 emit_insn (gen_rtx_SET (VOIDmode, x0,
21415 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
21416 UNSPEC_FRES)));
21417 /* e0 = 1. - d * x0 */
21418 emit_insn (gen_rtx_SET (VOIDmode, e0,
21419 gen_rtx_MINUS (SFmode, one,
21420 gen_rtx_MULT (SFmode, d, x0))));
21421 /* e1 = e0 + e0 * e0 */
21422 emit_insn (gen_rtx_SET (VOIDmode, e1,
21423 gen_rtx_PLUS (SFmode,
21424 gen_rtx_MULT (SFmode, e0, e0), e0)));
21425 /* y1 = x0 + e1 * x0 */
21426 emit_insn (gen_rtx_SET (VOIDmode, y1,
21427 gen_rtx_PLUS (SFmode,
21428 gen_rtx_MULT (SFmode, e1, x0), x0)));
21429 /* u0 = n * y1 */
21430 emit_insn (gen_rtx_SET (VOIDmode, u0,
21431 gen_rtx_MULT (SFmode, n, y1)));
21432 /* v0 = n - d * u0 */
21433 emit_insn (gen_rtx_SET (VOIDmode, v0,
21434 gen_rtx_MINUS (SFmode, n,
21435 gen_rtx_MULT (SFmode, d, u0))));
9c78b944
DE
21436 /* dst = u0 + v0 * y1 */
21437 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21438 gen_rtx_PLUS (SFmode,
21439 gen_rtx_MULT (SFmode, v0, y1), u0)));
21440}
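/* A minimal, self-contained C model of the sequence emitted above --
   a sketch for illustration only, not part of GCC.  FRES_D stands for
   the hardware `fres' estimate of 1/D.  */
static float
swdivsf_model (float n, float d, float fres_d)
{
  float x0 = fres_d;          /* x0 ~= 1/d                          */
  float e0 = 1.0f - d * x0;   /* relative error of the estimate     */
  float e1 = e0 + e0 * e0;
  float y1 = x0 + e1 * x0;    /* y1 = x0*(1 + e0 + e0^2) ~= 1/d     */
  float u0 = n * y1;          /* first quotient approximation       */
  float v0 = n - d * u0;      /* residual                           */
  return u0 + v0 * y1;        /* corrected quotient                 */
}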
21441
21442/* Newton-Raphson approximation of double-precision floating point divide n/d.
21443 Assumes no trapping math and finite arguments. */
21444
21445void
9c78b944 21446rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21447{
21448 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
21449
21450 x0 = gen_reg_rtx (DFmode);
21451 e0 = gen_reg_rtx (DFmode);
21452 e1 = gen_reg_rtx (DFmode);
21453 e2 = gen_reg_rtx (DFmode);
21454 y1 = gen_reg_rtx (DFmode);
21455 y2 = gen_reg_rtx (DFmode);
21456 y3 = gen_reg_rtx (DFmode);
21457 u0 = gen_reg_rtx (DFmode);
21458 v0 = gen_reg_rtx (DFmode);
21459 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
21460
21461 /* x0 = 1./d estimate */
21462 emit_insn (gen_rtx_SET (VOIDmode, x0,
21463 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
21464 UNSPEC_FRES)));
21465 /* e0 = 1. - d * x0 */
21466 emit_insn (gen_rtx_SET (VOIDmode, e0,
21467 gen_rtx_MINUS (DFmode, one,
 21468 gen_rtx_MULT (DFmode, d, x0))));
21469 /* y1 = x0 + e0 * x0 */
21470 emit_insn (gen_rtx_SET (VOIDmode, y1,
21471 gen_rtx_PLUS (DFmode,
21472 gen_rtx_MULT (DFmode, e0, x0), x0)));
21473 /* e1 = e0 * e0 */
21474 emit_insn (gen_rtx_SET (VOIDmode, e1,
21475 gen_rtx_MULT (DFmode, e0, e0)));
21476 /* y2 = y1 + e1 * y1 */
21477 emit_insn (gen_rtx_SET (VOIDmode, y2,
21478 gen_rtx_PLUS (DFmode,
21479 gen_rtx_MULT (DFmode, e1, y1), y1)));
21480 /* e2 = e1 * e1 */
21481 emit_insn (gen_rtx_SET (VOIDmode, e2,
21482 gen_rtx_MULT (DFmode, e1, e1)));
21483 /* y3 = y2 + e2 * y2 */
21484 emit_insn (gen_rtx_SET (VOIDmode, y3,
21485 gen_rtx_PLUS (DFmode,
21486 gen_rtx_MULT (DFmode, e2, y2), y2)));
21487 /* u0 = n * y3 */
21488 emit_insn (gen_rtx_SET (VOIDmode, u0,
21489 gen_rtx_MULT (DFmode, n, y3)));
21490 /* v0 = n - d * u0 */
21491 emit_insn (gen_rtx_SET (VOIDmode, v0,
21492 gen_rtx_MINUS (DFmode, n,
21493 gen_rtx_MULT (DFmode, d, u0))));
9c78b944
DE
21494 /* dst = u0 + v0 * y3 */
21495 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21496 gen_rtx_PLUS (DFmode,
21497 gen_rtx_MULT (DFmode, v0, y3), u0)));
21498}
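/* Why three refinement steps here versus one above: writing
   e0 = 1 - d*x0, the chain computes
   y3 = x0*(1 + e0)*(1 + e0^2)*(1 + e0^4) = x0*(1 + e0 + ... + e0^7),
   so y3 approximates 1/d with relative error on the order of e0^8.
   Assuming the hardware estimate is good to roughly 8 bits (e0 <~ 2^-8),
   that comfortably covers the 53-bit DFmode significand, and the final
   u0/v0 step corrects the low bits of the quotient.  */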
21499
565ef4ba 21500
9c78b944
DE
21501/* Newton-Raphson approximation of single-precision floating point rsqrt.
21502 Assumes no trapping math and finite arguments. */
21503
21504void
21505rs6000_emit_swrsqrtsf (rtx dst, rtx src)
21506{
21507 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
21508 half, one, halfthree, c1, cond, label;
21509
21510 x0 = gen_reg_rtx (SFmode);
21511 x1 = gen_reg_rtx (SFmode);
21512 x2 = gen_reg_rtx (SFmode);
21513 y1 = gen_reg_rtx (SFmode);
21514 u0 = gen_reg_rtx (SFmode);
21515 u1 = gen_reg_rtx (SFmode);
21516 u2 = gen_reg_rtx (SFmode);
21517 v0 = gen_reg_rtx (SFmode);
21518 v1 = gen_reg_rtx (SFmode);
21519 v2 = gen_reg_rtx (SFmode);
21520 t0 = gen_reg_rtx (SFmode);
21521 halfthree = gen_reg_rtx (SFmode);
21522 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
21523 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
21524
 21525 /* Check 0.0, 1.0, NaN, Inf by testing src * src == src. */
21526 emit_insn (gen_rtx_SET (VOIDmode, t0,
21527 gen_rtx_MULT (SFmode, src, src)));
21528
21529 emit_insn (gen_rtx_SET (VOIDmode, cond,
21530 gen_rtx_COMPARE (CCFPmode, t0, src)));
21531 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
21532 emit_unlikely_jump (c1, label);
21533
21534 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
21535 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21536
21537 /* halfthree = 1.5 = 1.0 + 0.5 */
21538 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
21539 gen_rtx_PLUS (SFmode, one, half)));
21540
21541 /* x0 = rsqrt estimate */
21542 emit_insn (gen_rtx_SET (VOIDmode, x0,
21543 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
21544 UNSPEC_RSQRT)));
21545
21546 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
21547 emit_insn (gen_rtx_SET (VOIDmode, y1,
21548 gen_rtx_MINUS (SFmode,
21549 gen_rtx_MULT (SFmode, src, halfthree),
21550 src)));
21551
21552 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
21553 emit_insn (gen_rtx_SET (VOIDmode, u0,
21554 gen_rtx_MULT (SFmode, x0, x0)));
21555 emit_insn (gen_rtx_SET (VOIDmode, v0,
21556 gen_rtx_MINUS (SFmode,
21557 halfthree,
21558 gen_rtx_MULT (SFmode, y1, u0))));
21559 emit_insn (gen_rtx_SET (VOIDmode, x1,
21560 gen_rtx_MULT (SFmode, x0, v0)));
21561
21562 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
21563 emit_insn (gen_rtx_SET (VOIDmode, u1,
21564 gen_rtx_MULT (SFmode, x1, x1)));
21565 emit_insn (gen_rtx_SET (VOIDmode, v1,
21566 gen_rtx_MINUS (SFmode,
21567 halfthree,
21568 gen_rtx_MULT (SFmode, y1, u1))));
21569 emit_insn (gen_rtx_SET (VOIDmode, x2,
21570 gen_rtx_MULT (SFmode, x1, v1)));
21571
21572 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
21573 emit_insn (gen_rtx_SET (VOIDmode, u2,
21574 gen_rtx_MULT (SFmode, x2, x2)));
21575 emit_insn (gen_rtx_SET (VOIDmode, v2,
21576 gen_rtx_MINUS (SFmode,
21577 halfthree,
21578 gen_rtx_MULT (SFmode, y1, u2))));
21579 emit_insn (gen_rtx_SET (VOIDmode, dst,
21580 gen_rtx_MULT (SFmode, x2, v2)));
21581
21582 emit_label (XEXP (label, 0));
21583}
21584
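/* Each refinement above is the standard Newton-Raphson rsqrt step
   x' = x * (1.5 - (0.5*a) * x * x), with y1 holding the precomputed
   0.5*a and halfthree holding 1.5.  A rough host-side model of the
   three steps (illustrative sketch only; X0 stands for the hardware
   reciprocal-square-root estimate):  */
#if 0
static float
swrsqrt_sf_model (float a, float x0)
{
  float y1 = 0.5f * a;
  float x1 = x0 * (1.5f - y1 * (x0 * x0));	/* first refinement */
  float x2 = x1 * (1.5f - y1 * (x1 * x1));	/* second refinement */
  return x2 * (1.5f - y1 * (x2 * x2));		/* third refinement */
}
#endif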
21585/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
21586 target, and SRC is the argument operand. */
21587
21588void
21589rs6000_emit_popcount (rtx dst, rtx src)
21590{
21591 enum machine_mode mode = GET_MODE (dst);
21592 rtx tmp1, tmp2;
21593
21594 tmp1 = gen_reg_rtx (mode);
21595
21596 if (mode == SImode)
21597 {
21598 emit_insn (gen_popcntbsi2 (tmp1, src));
21599 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
21600 NULL_RTX, 0);
21601 tmp2 = force_reg (SImode, tmp2);
21602 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
21603 }
21604 else
21605 {
21606 emit_insn (gen_popcntbdi2 (tmp1, src));
21607 tmp2 = expand_mult (DImode, tmp1,
21608 GEN_INT ((HOST_WIDE_INT)
21609 0x01010101 << 32 | 0x01010101),
21610 NULL_RTX, 0);
21611 tmp2 = force_reg (DImode, tmp2);
21612 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
21613 }
21614}
21615
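/* popcntb computes the population count of each byte in place, so the
   per-byte counts can be summed into the most significant byte by a
   multiply with 0x01010101 (0x0101010101010101 for DImode) and then
   extracted with a right shift by 24 (56 for DImode).  A host-side model
   of the SImode path (illustrative sketch only; popcntb_model stands in
   for the popcntb instruction):  */
#if 0
#include <stdint.h>

static uint32_t
popcntb_model (uint32_t x)
{
  uint32_t counts = 0;
  int i;
  /* Population count of each byte, left in that byte.  */
  for (i = 0; i < 4; i++)
    counts |= (uint32_t) __builtin_popcount ((x >> (8 * i)) & 0xff)
	      << (8 * i);
  return counts;
}

static uint32_t
popcount_si_model (uint32_t x)
{
  /* Each byte of the product accumulates the counts of the bytes at or
     below it; the top byte therefore holds the full population count.  */
  return (popcntb_model (x) * 0x01010101u) >> 24;
}
#endif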
21616
21617/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
21618 target, and SRC is the argument operand. */
21619
21620void
21621rs6000_emit_parity (rtx dst, rtx src)
21622{
21623 enum machine_mode mode = GET_MODE (dst);
21624 rtx tmp;
21625
21626 tmp = gen_reg_rtx (mode);
21627 if (mode == SImode)
21628 {
21629 /* Is mult+shift >= shift+xor+shift+xor? */
21630 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
21631 {
21632 rtx tmp1, tmp2, tmp3, tmp4;
21633
21634 tmp1 = gen_reg_rtx (SImode);
21635 emit_insn (gen_popcntbsi2 (tmp1, src));
21636
21637 tmp2 = gen_reg_rtx (SImode);
21638 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
21639 tmp3 = gen_reg_rtx (SImode);
21640 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
21641
21642 tmp4 = gen_reg_rtx (SImode);
21643 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
21644 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
21645 }
21646 else
21647 rs6000_emit_popcount (tmp, src);
21648 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
21649 }
21650 else
21651 {
21652 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
21653 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
21654 {
21655 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
21656
21657 tmp1 = gen_reg_rtx (DImode);
21658 emit_insn (gen_popcntbdi2 (tmp1, src));
21659
21660 tmp2 = gen_reg_rtx (DImode);
21661 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
21662 tmp3 = gen_reg_rtx (DImode);
21663 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
21664
21665 tmp4 = gen_reg_rtx (DImode);
21666 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
21667 tmp5 = gen_reg_rtx (DImode);
21668 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
21669
21670 tmp6 = gen_reg_rtx (DImode);
21671 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
21672 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
21673 }
21674 else
21675 rs6000_emit_popcount (tmp, src);
21676 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
21677 }
21678}
21679
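/* For parity the per-byte counts do not need to be summed; folding them
   together with shifts and XORs preserves the low bit, which is the
   parity of the whole word.  A host-side model of the SImode shift/xor
   path (illustrative sketch only):  */
#if 0
#include <stdint.h>

static uint32_t
parity_si_model (uint32_t x)
{
  uint32_t t = 0;
  int i;
  /* Per-byte population counts, as popcntb would leave them.  */
  for (i = 0; i < 4; i++)
    t |= (uint32_t) __builtin_popcount ((x >> (8 * i)) & 0xff) << (8 * i);
  t ^= t >> 16;		/* fold the upper half onto the lower half */
  t ^= t >> 8;		/* fold the remaining two bytes */
  return t & 1;		/* the low bit is the parity of X */
}
#endif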
21680/* Return an RTX representing where to find the function value of a
21681 function returning MODE. */
21682static rtx
21683rs6000_complex_function_value (enum machine_mode mode)
21684{
21685 unsigned int regno;
21686 rtx r1, r2;
21687 enum machine_mode inner = GET_MODE_INNER (mode);
 21688   unsigned int inner_bytes = GET_MODE_SIZE (inner);
 21689
21690 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21691 regno = FP_ARG_RETURN;
21692 else
21693 {
 21694       regno = GP_ARG_RETURN;
 21695
21696 /* 32-bit is OK since it'll go in r3/r4. */
21697 if (TARGET_32BIT && inner_bytes >= 4)
21698 return gen_rtx_REG (mode, regno);
21699 }
21700
21701 if (inner_bytes >= 8)
21702 return gen_rtx_REG (mode, regno);
21703
21704 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
21705 const0_rtx);
21706 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
 21707 			  GEN_INT (inner_bytes));
21708 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
21709}
21710
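/* The PARALLEL built above describes a complex value split across two
   consecutive registers: the real part at byte offset 0 and the
   imaginary part at the size of the component mode.  A host-side sketch
   of the same decomposition for a double _Complex (illustrative only;
   the field names merely suggest the first and second return registers):  */
#if 0
#include <complex.h>

struct complex_return_model { double first_reg, second_reg; };

static struct complex_return_model
split_complex_return_model (double _Complex z)
{
  struct complex_return_model r;
  r.first_reg = creal (z);	/* real part, offset 0 */
  r.second_reg = cimag (z);	/* imaginary part, offset 8 for DFmode */
  return r;
}
#endif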
21711/* Define how to find the value returned by a function.
21712 VALTYPE is the data type of the value (as a tree).
21713 If the precise function being called is known, FUNC is its FUNCTION_DECL;
21714 otherwise, FUNC is 0.
21715
21716 On the SPE, both FPs and vectors are returned in r3.
21717
21718 On RS/6000 an integer value is in r3 and a floating-point value is in
21719 fp1, unless -msoft-float. */
21720
21721rtx
 21722 rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
21723{
21724 enum machine_mode mode;
 21725   unsigned int regno;
 21726
21727 /* Special handling for structs in darwin64. */
21728 if (rs6000_darwin64_abi
21729 && TYPE_MODE (valtype) == BLKmode
21730 && TREE_CODE (valtype) == RECORD_TYPE
21731 && int_size_in_bytes (valtype) > 0)
21732 {
21733 CUMULATIVE_ARGS valcum;
21734 rtx valret;
21735
 21736       valcum.words = 0;
21737 valcum.fregno = FP_ARG_MIN_REG;
21738 valcum.vregno = ALTIVEC_ARG_MIN_REG;
21739 /* Do a trial code generation as if this were going to be passed as
21740 an argument; if any part goes in memory, we return NULL. */
21741 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
21742 if (valret)
21743 return valret;
21744 /* Otherwise fall through to standard ABI rules. */
21745 }
21746
21747 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
21748 {
 21749       /* Long long return values need to be split for the 32-bit -mpowerpc64 ABI.  */
21750 return gen_rtx_PARALLEL (DImode,
21751 gen_rtvec (2,
21752 gen_rtx_EXPR_LIST (VOIDmode,
21753 gen_rtx_REG (SImode, GP_ARG_RETURN),
21754 const0_rtx),
21755 gen_rtx_EXPR_LIST (VOIDmode,
21756 gen_rtx_REG (SImode,
21757 GP_ARG_RETURN + 1),
21758 GEN_INT (4))));
21759 }
21760 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
21761 {
21762 return gen_rtx_PARALLEL (DCmode,
21763 gen_rtvec (4,
21764 gen_rtx_EXPR_LIST (VOIDmode,
21765 gen_rtx_REG (SImode, GP_ARG_RETURN),
21766 const0_rtx),
21767 gen_rtx_EXPR_LIST (VOIDmode,
21768 gen_rtx_REG (SImode,
21769 GP_ARG_RETURN + 1),
21770 GEN_INT (4)),
21771 gen_rtx_EXPR_LIST (VOIDmode,
21772 gen_rtx_REG (SImode,
21773 GP_ARG_RETURN + 2),
21774 GEN_INT (8)),
21775 gen_rtx_EXPR_LIST (VOIDmode,
21776 gen_rtx_REG (SImode,
21777 GP_ARG_RETURN + 3),
21778 GEN_INT (12))));
21779 }
 21780
21781 mode = TYPE_MODE (valtype);
21782 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
 21783       || POINTER_TYPE_P (valtype))
 21784     mode = TARGET_32BIT ? SImode : DImode;
 21785
21786 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21787 /* _Decimal128 must use an even/odd register pair. */
21788 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
 21789   else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
 21790     regno = FP_ARG_RETURN;
 21791   else if (TREE_CODE (valtype) == COMPLEX_TYPE
 21792 	   && targetm.calls.split_complex_arg)
 21793     return rs6000_complex_function_value (mode);
 21794   else if (TREE_CODE (valtype) == VECTOR_TYPE
 21795 	   && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
 21796 	   && ALTIVEC_VECTOR_MODE (mode))
 21797     regno = ALTIVEC_ARG_RETURN;
 21798   else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
21799 && (mode == DFmode || mode == DDmode || mode == DCmode
21800 || mode == TFmode || mode == TDmode || mode == TCmode))
 21801     return spe_build_register_parallel (mode, GP_ARG_RETURN);
21802 else
21803 regno = GP_ARG_RETURN;
21804
21805 return gen_rtx_REG (mode, regno);
21806}
21807
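/* The DImode PARALLEL built near the top of rs6000_function_value models
   the 32-bit ABI rule that a 64-bit integer comes back as two 32-bit
   halves in consecutive GPRs, even when -mpowerpc64 keeps the value in a
   single 64-bit register inside the function.  A host-side sketch of the
   big-endian split (illustrative only; the field names merely suggest
   GP_ARG_RETURN and GP_ARG_RETURN + 1):  */
#if 0
#include <stdint.h>

struct dimode_return_model { uint32_t first_reg, second_reg; };

static struct dimode_return_model
split_dimode_return_model (uint64_t value)
{
  struct dimode_return_model r;
  r.first_reg = (uint32_t) (value >> 32);	/* bytes 0-3: high word */
  r.second_reg = (uint32_t) value;		/* bytes 4-7: low word */
  return r;
}
#endif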
21808/* Define how to find the value returned by a library function
21809 assuming the value has mode MODE. */
21810rtx
21811rs6000_libcall_value (enum machine_mode mode)
21812{
21813 unsigned int regno;
21814
21815 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
21816 {
 21817       /* Long long return values need to be split for the 32-bit -mpowerpc64 ABI.  */
21818 return gen_rtx_PARALLEL (DImode,
21819 gen_rtvec (2,
21820 gen_rtx_EXPR_LIST (VOIDmode,
21821 gen_rtx_REG (SImode, GP_ARG_RETURN),
21822 const0_rtx),
21823 gen_rtx_EXPR_LIST (VOIDmode,
21824 gen_rtx_REG (SImode,
21825 GP_ARG_RETURN + 1),
21826 GEN_INT (4))));
21827 }
21828
21829 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21830 /* _Decimal128 must use an even/odd register pair. */
21831 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
 21832   else if (SCALAR_FLOAT_MODE_P (mode)
21833 && TARGET_HARD_FLOAT && TARGET_FPRS)
21834 regno = FP_ARG_RETURN;
21835 else if (ALTIVEC_VECTOR_MODE (mode)
21836 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
 21837     regno = ALTIVEC_ARG_RETURN;
 21838   else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
 21839     return rs6000_complex_function_value (mode);
 21840   else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
21841 && (mode == DFmode || mode == DDmode || mode == DCmode
21842 || mode == TFmode || mode == TDmode || mode == TCmode))
 21843     return spe_build_register_parallel (mode, GP_ARG_RETURN);
21844 else
21845 regno = GP_ARG_RETURN;
21846
21847 return gen_rtx_REG (mode, regno);
21848}
21849
21850/* Define the offset between two registers, FROM to be eliminated and its
21851 replacement TO, at the start of a routine. */
21852HOST_WIDE_INT
21853rs6000_initial_elimination_offset (int from, int to)
21854{
21855 rs6000_stack_t *info = rs6000_stack_info ();
21856 HOST_WIDE_INT offset;
21857
 21858   if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
 21859     offset = info->push_p ? 0 : -info->total_size;
21860 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
21861 {
21862 offset = info->push_p ? 0 : -info->total_size;
21863 if (FRAME_GROWS_DOWNWARD)
 21864 	offset += info->fixed_size + info->vars_size + info->parm_size;
21865 }
21866 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
21867 offset = FRAME_GROWS_DOWNWARD
 21868 	     ? info->fixed_size + info->vars_size + info->parm_size
21869 : 0;
21870 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
21871 offset = info->total_size;
21872 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
21873 offset = info->push_p ? info->total_size : 0;
21874 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
21875 offset = 0;
21876 else
 21877     gcc_unreachable ();
21878
21879 return offset;
21880}
21881
 21882 /* Return true if TYPE is a SPE or AltiVec opaque type. */
 21883
 21884 static bool
 21885 rs6000_is_opaque_type (const_tree type)
 21886 {
 21887   return (type == opaque_V2SI_type_node
 21888 	  || type == opaque_V2SF_type_node
21889 || type == opaque_p_V2SI_type_node
21890 || type == opaque_V4SI_type_node);
21891}
21892
 21893 static rtx
 21894 rs6000_dwarf_register_span (rtx reg)
21895{
21896 unsigned regno;
21897
21898 if (TARGET_SPE
21899 && (SPE_VECTOR_MODE (GET_MODE (reg))
21900 || (TARGET_E500_DOUBLE
21901 && (GET_MODE (reg) == DFmode || GET_MODE (reg) == DDmode))))
21902 ;
21903 else
21904 return NULL_RTX;
21905
21906 regno = REGNO (reg);
21907
21908 /* The duality of the SPE register size wreaks all kinds of havoc.
21909 This is a way of distinguishing r0 in 32-bits from r0 in
21910 64-bits. */
21911 return
21912 gen_rtx_PARALLEL (VOIDmode,
21913 BYTES_BIG_ENDIAN
21914 ? gen_rtvec (2,
21915 gen_rtx_REG (SImode, regno + 1200),
21916 gen_rtx_REG (SImode, regno))
21917 : gen_rtvec (2,
21918 gen_rtx_REG (SImode, regno),
21919 gen_rtx_REG (SImode, regno + 1200)));
21920}
21921
21922/* Fill in sizes for SPE register high parts in table used by unwinder. */
21923
21924static void
21925rs6000_init_dwarf_reg_sizes_extra (tree address)
21926{
21927 if (TARGET_SPE)
21928 {
21929 int i;
21930 enum machine_mode mode = TYPE_MODE (char_type_node);
21931 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
21932 rtx mem = gen_rtx_MEM (BLKmode, addr);
21933 rtx value = gen_int_mode (4, mode);
21934
21935 for (i = 1201; i < 1232; i++)
21936 {
21937 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
21938 HOST_WIDE_INT offset
21939 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
21940
21941 emit_move_insn (adjust_address (mem, mode, offset), value);
21942 }
21943 }
21944}
21945
21946/* Map internal gcc register numbers to DWARF2 register numbers. */
21947
21948unsigned int
21949rs6000_dbx_register_number (unsigned int regno)
21950{
21951 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
21952 return regno;
21953 if (regno == MQ_REGNO)
21954 return 100;
 21955   if (regno == LR_REGNO)
 21956     return 108;
 21957   if (regno == CTR_REGNO)
21958 return 109;
21959 if (CR_REGNO_P (regno))
21960 return regno - CR0_REGNO + 86;
21961 if (regno == XER_REGNO)
21962 return 101;
21963 if (ALTIVEC_REGNO_P (regno))
21964 return regno - FIRST_ALTIVEC_REGNO + 1124;
21965 if (regno == VRSAVE_REGNO)
21966 return 356;
21967 if (regno == VSCR_REGNO)
21968 return 67;
21969 if (regno == SPE_ACC_REGNO)
21970 return 99;
21971 if (regno == SPEFSCR_REGNO)
21972 return 612;
21973 /* SPE high reg number. We get these values of regno from
21974 rs6000_dwarf_register_span. */
21975 gcc_assert (regno >= 1200 && regno < 1232);
21976 return regno;
21977}
21978
 21979 /* target hook eh_return_filter_mode */
 21980 static enum machine_mode
21981rs6000_eh_return_filter_mode (void)
21982{
21983 return TARGET_32BIT ? SImode : word_mode;
21984}
21985
21986/* Target hook for scalar_mode_supported_p. */
21987static bool
21988rs6000_scalar_mode_supported_p (enum machine_mode mode)
21989{
21990 if (DECIMAL_FLOAT_MODE_P (mode))
21991 return true;
21992 else
21993 return default_scalar_mode_supported_p (mode);
21994}
21995
21996/* Target hook for vector_mode_supported_p. */
21997static bool
21998rs6000_vector_mode_supported_p (enum machine_mode mode)
21999{
22000
22001 if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
22002 return true;
22003
22004 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
22005 return true;
22006
22007 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
22008 return true;
22009
22010 else
22011 return false;
22012}
22013
22014/* Target hook for invalid_arg_for_unprototyped_fn. */
22015static const char *
 22016 invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
22017{
22018 return (!rs6000_darwin64_abi
22019 && typelist == 0
22020 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
22021 && (funcdecl == NULL_TREE
22022 || (TREE_CODE (funcdecl) == FUNCTION_DECL
22023 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
22024 ? N_("AltiVec argument passed to unprototyped function")
22025 : NULL;
22026}
22027
22028/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
22029 setup by using __stack_chk_fail_local hidden function instead of
22030 calling __stack_chk_fail directly. Otherwise it is better to call
22031 __stack_chk_fail directly. */
22032
22033static tree
22034rs6000_stack_protect_fail (void)
22035{
22036 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
22037 ? default_hidden_stack_protect_fail ()
22038 : default_external_stack_protect_fail ();
22039}
22040
 22041 #include "gt-rs6000.h"