gcc/config/rs6000/rs6000.c
9878760c 1/* Subroutines used for code generation on IBM RS/6000.
f676971a 2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
66647d44 3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
337bde91 4 Free Software Foundation, Inc.
fab3bcc3 5 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
9878760c 6
5de601cf 7 This file is part of GCC.
9878760c 8
5de601cf
NC
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
2f83c7d6 11 by the Free Software Foundation; either version 3, or (at your
5de601cf 12 option) any later version.
9878760c 13
5de601cf
NC
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
9878760c 18
5de601cf 19 You should have received a copy of the GNU General Public License
2f83c7d6
NC
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
9878760c 22
956d6950 23#include "config.h"
c4d38ccb 24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
9878760c
RK
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
9878760c
RK
33#include "insn-attr.h"
34#include "flags.h"
35#include "recog.h"
9878760c 36#include "obstack.h"
9b30bae2 37#include "tree.h"
dfafc897 38#include "expr.h"
2fc1c679 39#include "optabs.h"
2a430ec1 40#include "except.h"
a7df97e6 41#include "function.h"
296b8152 42#include "output.h"
d5fa86ba 43#include "basic-block.h"
d0101753 44#include "integrate.h"
296b8152 45#include "toplev.h"
c8023011 46#include "ggc.h"
9ebbca7d
GK
47#include "hashtab.h"
48#include "tm_p.h"
672a6f42
NB
49#include "target.h"
50#include "target-def.h"
3ac88239 51#include "langhooks.h"
24ea750e 52#include "reload.h"
117dca74 53#include "cfglayout.h"
79ae11c4 54#include "sched-int.h"
726a989a 55#include "gimple.h"
e41b2a33 56#include "tree-flow.h"
4d3e6fae 57#include "intl.h"
59d6560b 58#include "params.h"
279bb624 59#include "tm-constrs.h"
1bc7c5b6
ZW
60#if TARGET_XCOFF
61#include "xcoffout.h" /* get declarations of xcoff_*_section_name */
62#endif
93a27b7b
ZW
63#if TARGET_MACHO
64#include "gstab.h" /* for N_SLINE */
65#endif
9b30bae2 66
7509c759
MM
67#ifndef TARGET_NO_PROTOTYPE
68#define TARGET_NO_PROTOTYPE 0
69#endif
70
9878760c
RK
71#define min(A,B) ((A) < (B) ? (A) : (B))
72#define max(A,B) ((A) > (B) ? (A) : (B))
73
d1d0c603
JJ
 74/* Structure used to define the rs6000 stack.  */
75typedef struct rs6000_stack {
76 int first_gp_reg_save; /* first callee saved GP register used */
77 int first_fp_reg_save; /* first callee saved FP register used */
78 int first_altivec_reg_save; /* first callee saved AltiVec register used */
79 int lr_save_p; /* true if the link reg needs to be saved */
80 int cr_save_p; /* true if the CR reg needs to be saved */
81 unsigned int vrsave_mask; /* mask of vec registers to save */
d1d0c603
JJ
82 int push_p; /* true if we need to allocate stack space */
83 int calls_p; /* true if the function makes any calls */
c4ad648e 84 int world_save_p; /* true if we're saving *everything*:
d62294f5 85 r13-r31, cr, f14-f31, vrsave, v20-v31 */
d1d0c603
JJ
86 enum rs6000_abi abi; /* which ABI to use */
87 int gp_save_offset; /* offset to save GP regs from initial SP */
88 int fp_save_offset; /* offset to save FP regs from initial SP */
89 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
90 int lr_save_offset; /* offset to save LR from initial SP */
91 int cr_save_offset; /* offset to save CR from initial SP */
92 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
93 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
d1d0c603
JJ
94 int varargs_save_offset; /* offset to save the varargs registers */
95 int ehrd_offset; /* offset to EH return data */
96 int reg_size; /* register size (4 or 8) */
d1d0c603
JJ
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
d1d0c603
JJ
105 int vrsave_size; /* size to hold VRSAVE if not in save_size */
106 int altivec_padding_size; /* size of altivec alignment padding if
107 not in save_size */
108 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
 109 int spe_padding_size; /* size of SPE alignment padding if not in save_size */
d1d0c603
JJ
110 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
111 int spe_64bit_regs_used;
112} rs6000_stack_t;
113
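/* For illustration (a sketch, not code quoted from this file): the rest of
   the backend obtains this record through rs6000_stack_info (declared
   below) and reads individual fields from it, roughly:

     rs6000_stack_t *info = rs6000_stack_info ();
     if (info->lr_save_p)
       ...  save LR at frame offset info->lr_save_offset  ...
     allocate = info->total_size;

   so frame-layout decisions are centralized in that one function.  */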
5b667039
JJ
114/* A C structure for machine-specific, per-function data.
115 This is added to the cfun structure. */
d1b38208 116typedef struct GTY(()) machine_function
5b667039
JJ
117{
118 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
119 int ra_needs_full_frame;
120 /* Some local-dynamic symbol. */
121 const char *some_ld_name;
122 /* Whether the instruction chain has been scanned already. */
123 int insn_chain_scanned_p;
124 /* Flags if __builtin_return_address (0) was used. */
125 int ra_need_lr;
126 /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
127 varargs save area. */
128 HOST_WIDE_INT varargs_save_offset;
e41b2a33
PB
129 /* Temporary stack slot to use for SDmode copies. This slot is
130 64-bits wide and is allocated early enough so that the offset
131 does not overflow the 16-bit load/store offset field. */
132 rtx sdmode_stack_slot;
5b667039
JJ
133} machine_function;
134
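/* A sketch of how this record is reached, assuming the usual GCC
   convention that init_machine_status allocates one per function:

     if (cfun->machine->ra_needs_full_frame)
       ...  the function used __builtin_return_address (n), n >= 1  ...

   rs6000_init_machine_status, declared further down, is the allocator
   hooked up for this purpose.  */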
5248c961
RK
135/* Target cpu type */
136
137enum processor_type rs6000_cpu;
8e3f41e7
MM
138struct rs6000_cpu_select rs6000_select[3] =
139{
815cdc52
MM
140 /* switch name, tune arch */
141 { (const char *)0, "--with-cpu=", 1, 1 },
142 { (const char *)0, "-mcpu=", 1, 1 },
143 { (const char *)0, "-mtune=", 1, 0 },
8e3f41e7 144};
5248c961 145
ec507f2d
DE
146/* Always emit branch hint bits. */
147static GTY(()) bool rs6000_always_hint;
148
149/* Schedule instructions for group formation. */
150static GTY(()) bool rs6000_sched_groups;
151
44cd321e
PS
152/* Align branch targets. */
153static GTY(()) bool rs6000_align_branch_targets;
154
569fa502
DN
155/* Support for -msched-costly-dep option. */
156const char *rs6000_sched_costly_dep_str;
157enum rs6000_dependence_cost rs6000_sched_costly_dep;
158
cbe26ab8
DN
159/* Support for -minsert-sched-nops option. */
160const char *rs6000_sched_insert_nops_str;
161enum rs6000_nop_insertion rs6000_sched_insert_nops;
162
7ccf35ed 163/* Support targetm.vectorize.builtin_mask_for_load. */
13c62176 164static GTY(()) tree altivec_builtin_mask_for_load;
7ccf35ed 165
602ea4d3 166/* Size of long double. */
6fa3f289
ZW
167int rs6000_long_double_type_size;
168
602ea4d3
JJ
169/* IEEE quad extended precision long double. */
170int rs6000_ieeequad;
171
a2db2771 172/* Nonzero to use AltiVec ABI. */
6fa3f289
ZW
173int rs6000_altivec_abi;
174
94f4765c
NF
175/* Nonzero if we want SPE SIMD instructions. */
176int rs6000_spe;
177
a3170dc6
AH
178/* Nonzero if we want SPE ABI extensions. */
179int rs6000_spe_abi;
180
94f4765c
NF
181/* Nonzero to use isel instructions. */
182int rs6000_isel;
183
5da702b1
AH
184/* Nonzero if floating point operations are done in the GPRs. */
185int rs6000_float_gprs = 0;
186
594a51fe
SS
187/* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
188int rs6000_darwin64_abi;
189
a0ab749a 190/* Set to nonzero once AIX common-mode calls have been defined. */
bbfb86aa 191static GTY(()) int common_mode_defined;
c81bebd7 192
874a0744
MM
 193/* Label number of the label created for -mrelocatable, which we call
 194 to get the address of the GOT section.  */
195int rs6000_pic_labelno;
c81bebd7 196
b91da81f 197#ifdef USING_ELFOS_H
 c81bebd7 198/* Which ABI to adhere to */
9739c90c 199const char *rs6000_abi_name;
d9407988
MM
200
201/* Semantics of the small data area */
202enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
203
204/* Which small data model to use */
815cdc52 205const char *rs6000_sdata_name = (char *)0;
9ebbca7d
GK
206
207/* Counter for labels which are to be placed in .fixup. */
208int fixuplabelno = 0;
874a0744 209#endif
4697a36c 210
c4501e62
JJ
211/* Bit size of immediate TLS offsets and string from which it is decoded. */
212int rs6000_tls_size = 32;
213const char *rs6000_tls_size_string;
214
b6c9286a
MM
215/* ABI enumeration available for subtarget to use. */
216enum rs6000_abi rs6000_current_abi;
217
85b776df
AM
218/* Whether to use variant of AIX ABI for PowerPC64 Linux. */
219int dot_symbols;
220
38c1f2d7 221/* Debug flags */
815cdc52 222const char *rs6000_debug_name;
38c1f2d7
MM
223int rs6000_debug_stack; /* debug stack applications */
224int rs6000_debug_arg; /* debug argument handling */
225
aabcd309 226/* Value is TRUE if register/mode pair is acceptable. */
0d1fbc8c
AH
227bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
228
58646b77
PB
229/* Built in types. */
230
231tree rs6000_builtin_types[RS6000_BTI_MAX];
232tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
8bb418a3 233
57ac7be9
AM
234const char *rs6000_traceback_name;
235static enum {
236 traceback_default = 0,
237 traceback_none,
238 traceback_part,
239 traceback_full
240} rs6000_traceback;
241
38c1f2d7
MM
242/* Flag to say the TOC is initialized */
243int toc_initialized;
9ebbca7d 244char toc_label_name[10];
38c1f2d7 245
44cd321e
PS
246/* Cached value of rs6000_variable_issue. This is cached in
247 rs6000_variable_issue hook and returned from rs6000_sched_reorder2. */
248static short cached_can_issue_more;
249
d6b5193b
RS
250static GTY(()) section *read_only_data_section;
251static GTY(()) section *private_data_section;
252static GTY(()) section *read_only_private_data_section;
253static GTY(()) section *sdata2_section;
254static GTY(()) section *toc_section;
255
a3c9585f
KH
256/* Control alignment for fields within structures. */
 257/* Flag bits set by the -malign-XXXXX options. */
025d9908
KH
258int rs6000_alignment_flags;
259
78f5898b
AH
260/* True for any options that were explicitly set. */
261struct {
df01da37 262 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
78f5898b 263 bool alignment; /* True if -malign- was used. */
a2db2771
JJ
264 bool spe_abi; /* True if -mabi=spe/no-spe was used. */
265 bool altivec_abi; /* True if -mabi=altivec/no-altivec used. */
78f5898b
AH
266 bool spe; /* True if -mspe= was used. */
267 bool float_gprs; /* True if -mfloat-gprs= was used. */
268 bool isel; /* True if -misel was used. */
269 bool long_double; /* True if -mlong-double- was used. */
d3603e8c 270 bool ieee; /* True if -mabi=ieee/ibmlongdouble used. */
a2db2771 271 bool vrsave; /* True if -mvrsave was used. */
78f5898b
AH
272} rs6000_explicit_options;
273
a3170dc6
AH
274struct builtin_description
275{
276 /* mask is not const because we're going to alter it below. This
277 nonsense will go away when we rewrite the -march infrastructure
278 to give us more target flag bits. */
279 unsigned int mask;
280 const enum insn_code icode;
281 const char *const name;
282 const enum rs6000_builtins code;
283};
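/* The builtin tables later in this file are arrays of this structure.
   A schematic entry (the names below are placeholders, not real builtins;
   a real entry carries an actual CODE_FOR_xxx insn code) looks like:

     { MASK_ALTIVEC, CODE_FOR_nothing,
       "__builtin_altivec_example", ALTIVEC_BUILTIN_EXAMPLE },

   where the mask field records which target flags must be enabled before
   the builtin is registered by def_builtin.  */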
8b897cfa
RS
284\f
285/* Target cpu costs. */
286
287struct processor_costs {
c4ad648e 288 const int mulsi; /* cost of SImode multiplication. */
8b897cfa
RS
289 const int mulsi_const; /* cost of SImode multiplication by constant. */
290 const int mulsi_const9; /* cost of SImode mult by short constant. */
c4ad648e
AM
291 const int muldi; /* cost of DImode multiplication. */
292 const int divsi; /* cost of SImode division. */
293 const int divdi; /* cost of DImode division. */
294 const int fp; /* cost of simple SFmode and DFmode insns. */
295 const int dmul; /* cost of DFmode multiplication (and fmadd). */
296 const int sdiv; /* cost of SFmode division (fdivs). */
297 const int ddiv; /* cost of DFmode division (fdiv). */
5f732aba
DE
298 const int cache_line_size; /* cache line size in bytes. */
299 const int l1_cache_size; /* size of l1 cache, in kilobytes. */
300 const int l2_cache_size; /* size of l2 cache, in kilobytes. */
0b11da67
DE
301 const int simultaneous_prefetches; /* number of parallel prefetch
302 operations. */
8b897cfa
RS
303};
304
305const struct processor_costs *rs6000_cost;
306
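/* A sketch of how these tables are consumed (assuming the usual flow in
   this file): rs6000_override_options points rs6000_cost at one of the
   tables below according to the -mcpu/-mtune selection, and cost queries
   such as rs6000_rtx_costs then read fields directly, e.g.

     *total = rs6000_cost->mulsi;

   with every value expressed through COSTS_N_INSNS, i.e. as a multiple of
   the cost of a single add.  */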
307/* Processor costs (relative to an add) */
308
309/* Instruction size costs on 32bit processors. */
310static const
311struct processor_costs size32_cost = {
06a67bdd
RS
312 COSTS_N_INSNS (1), /* mulsi */
313 COSTS_N_INSNS (1), /* mulsi_const */
314 COSTS_N_INSNS (1), /* mulsi_const9 */
315 COSTS_N_INSNS (1), /* muldi */
316 COSTS_N_INSNS (1), /* divsi */
317 COSTS_N_INSNS (1), /* divdi */
318 COSTS_N_INSNS (1), /* fp */
319 COSTS_N_INSNS (1), /* dmul */
320 COSTS_N_INSNS (1), /* sdiv */
321 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
 322 32, /* cache line size */
 323 0, /* l1 cache */
 324 0, /* l2 cache */
 5f732aba 325 0, /* streams */
8b897cfa
RS
326};
327
328/* Instruction size costs on 64bit processors. */
329static const
330struct processor_costs size64_cost = {
06a67bdd
RS
331 COSTS_N_INSNS (1), /* mulsi */
332 COSTS_N_INSNS (1), /* mulsi_const */
333 COSTS_N_INSNS (1), /* mulsi_const9 */
334 COSTS_N_INSNS (1), /* muldi */
335 COSTS_N_INSNS (1), /* divsi */
336 COSTS_N_INSNS (1), /* divdi */
337 COSTS_N_INSNS (1), /* fp */
338 COSTS_N_INSNS (1), /* dmul */
339 COSTS_N_INSNS (1), /* sdiv */
340 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
 341 128, /* cache line size */
 342 0, /* l1 cache */
 343 0, /* l2 cache */
 5f732aba 344 0, /* streams */
8b897cfa
RS
345};
346
347/* Instruction costs on RIOS1 processors. */
348static const
349struct processor_costs rios1_cost = {
06a67bdd
RS
350 COSTS_N_INSNS (5), /* mulsi */
351 COSTS_N_INSNS (4), /* mulsi_const */
352 COSTS_N_INSNS (3), /* mulsi_const9 */
353 COSTS_N_INSNS (5), /* muldi */
354 COSTS_N_INSNS (19), /* divsi */
355 COSTS_N_INSNS (19), /* divdi */
356 COSTS_N_INSNS (2), /* fp */
357 COSTS_N_INSNS (2), /* dmul */
358 COSTS_N_INSNS (19), /* sdiv */
359 COSTS_N_INSNS (19), /* ddiv */
0d158b6e 360 128, /* cache line size */
5f732aba
DE
361 64, /* l1 cache */
362 512, /* l2 cache */
0b11da67 363 0, /* streams */
8b897cfa
RS
364};
365
366/* Instruction costs on RIOS2 processors. */
367static const
368struct processor_costs rios2_cost = {
06a67bdd
RS
369 COSTS_N_INSNS (2), /* mulsi */
370 COSTS_N_INSNS (2), /* mulsi_const */
371 COSTS_N_INSNS (2), /* mulsi_const9 */
372 COSTS_N_INSNS (2), /* muldi */
373 COSTS_N_INSNS (13), /* divsi */
374 COSTS_N_INSNS (13), /* divdi */
375 COSTS_N_INSNS (2), /* fp */
376 COSTS_N_INSNS (2), /* dmul */
377 COSTS_N_INSNS (17), /* sdiv */
378 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 379 256, /* cache line size */
5f732aba
DE
380 256, /* l1 cache */
381 1024, /* l2 cache */
0b11da67 382 0, /* streams */
8b897cfa
RS
383};
384
385/* Instruction costs on RS64A processors. */
386static const
387struct processor_costs rs64a_cost = {
06a67bdd
RS
388 COSTS_N_INSNS (20), /* mulsi */
389 COSTS_N_INSNS (12), /* mulsi_const */
390 COSTS_N_INSNS (8), /* mulsi_const9 */
391 COSTS_N_INSNS (34), /* muldi */
392 COSTS_N_INSNS (65), /* divsi */
393 COSTS_N_INSNS (67), /* divdi */
394 COSTS_N_INSNS (4), /* fp */
395 COSTS_N_INSNS (4), /* dmul */
396 COSTS_N_INSNS (31), /* sdiv */
397 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 398 128, /* cache line size */
5f732aba
DE
399 128, /* l1 cache */
400 2048, /* l2 cache */
0b11da67 401 1, /* streams */
8b897cfa
RS
402};
403
404/* Instruction costs on MPCCORE processors. */
405static const
406struct processor_costs mpccore_cost = {
06a67bdd
RS
407 COSTS_N_INSNS (2), /* mulsi */
408 COSTS_N_INSNS (2), /* mulsi_const */
409 COSTS_N_INSNS (2), /* mulsi_const9 */
410 COSTS_N_INSNS (2), /* muldi */
411 COSTS_N_INSNS (6), /* divsi */
412 COSTS_N_INSNS (6), /* divdi */
413 COSTS_N_INSNS (4), /* fp */
414 COSTS_N_INSNS (5), /* dmul */
415 COSTS_N_INSNS (10), /* sdiv */
416 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 417 32, /* cache line size */
5f732aba
DE
418 4, /* l1 cache */
419 16, /* l2 cache */
0b11da67 420 1, /* streams */
8b897cfa
RS
421};
422
423/* Instruction costs on PPC403 processors. */
424static const
425struct processor_costs ppc403_cost = {
06a67bdd
RS
426 COSTS_N_INSNS (4), /* mulsi */
427 COSTS_N_INSNS (4), /* mulsi_const */
428 COSTS_N_INSNS (4), /* mulsi_const9 */
429 COSTS_N_INSNS (4), /* muldi */
430 COSTS_N_INSNS (33), /* divsi */
431 COSTS_N_INSNS (33), /* divdi */
432 COSTS_N_INSNS (11), /* fp */
433 COSTS_N_INSNS (11), /* dmul */
434 COSTS_N_INSNS (11), /* sdiv */
435 COSTS_N_INSNS (11), /* ddiv */
0d158b6e 436 32, /* cache line size */
5f732aba
DE
437 4, /* l1 cache */
438 16, /* l2 cache */
0b11da67 439 1, /* streams */
8b897cfa
RS
440};
441
442/* Instruction costs on PPC405 processors. */
443static const
444struct processor_costs ppc405_cost = {
06a67bdd
RS
445 COSTS_N_INSNS (5), /* mulsi */
446 COSTS_N_INSNS (4), /* mulsi_const */
447 COSTS_N_INSNS (3), /* mulsi_const9 */
448 COSTS_N_INSNS (5), /* muldi */
449 COSTS_N_INSNS (35), /* divsi */
450 COSTS_N_INSNS (35), /* divdi */
451 COSTS_N_INSNS (11), /* fp */
452 COSTS_N_INSNS (11), /* dmul */
453 COSTS_N_INSNS (11), /* sdiv */
454 COSTS_N_INSNS (11), /* ddiv */
0d158b6e 455 32, /* cache line size */
5f732aba
DE
456 16, /* l1 cache */
457 128, /* l2 cache */
0b11da67 458 1, /* streams */
8b897cfa
RS
459};
460
461/* Instruction costs on PPC440 processors. */
462static const
463struct processor_costs ppc440_cost = {
06a67bdd
RS
464 COSTS_N_INSNS (3), /* mulsi */
465 COSTS_N_INSNS (2), /* mulsi_const */
466 COSTS_N_INSNS (2), /* mulsi_const9 */
467 COSTS_N_INSNS (3), /* muldi */
468 COSTS_N_INSNS (34), /* divsi */
469 COSTS_N_INSNS (34), /* divdi */
470 COSTS_N_INSNS (5), /* fp */
471 COSTS_N_INSNS (5), /* dmul */
472 COSTS_N_INSNS (19), /* sdiv */
473 COSTS_N_INSNS (33), /* ddiv */
0d158b6e 474 32, /* cache line size */
5f732aba
DE
475 32, /* l1 cache */
476 256, /* l2 cache */
0b11da67 477 1, /* streams */
8b897cfa
RS
478};
479
480/* Instruction costs on PPC601 processors. */
481static const
482struct processor_costs ppc601_cost = {
06a67bdd
RS
483 COSTS_N_INSNS (5), /* mulsi */
484 COSTS_N_INSNS (5), /* mulsi_const */
485 COSTS_N_INSNS (5), /* mulsi_const9 */
486 COSTS_N_INSNS (5), /* muldi */
487 COSTS_N_INSNS (36), /* divsi */
488 COSTS_N_INSNS (36), /* divdi */
489 COSTS_N_INSNS (4), /* fp */
490 COSTS_N_INSNS (5), /* dmul */
491 COSTS_N_INSNS (17), /* sdiv */
492 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 493 32, /* cache line size */
5f732aba
DE
494 32, /* l1 cache */
495 256, /* l2 cache */
0b11da67 496 1, /* streams */
8b897cfa
RS
497};
498
499/* Instruction costs on PPC603 processors. */
500static const
501struct processor_costs ppc603_cost = {
06a67bdd
RS
502 COSTS_N_INSNS (5), /* mulsi */
503 COSTS_N_INSNS (3), /* mulsi_const */
504 COSTS_N_INSNS (2), /* mulsi_const9 */
505 COSTS_N_INSNS (5), /* muldi */
506 COSTS_N_INSNS (37), /* divsi */
507 COSTS_N_INSNS (37), /* divdi */
508 COSTS_N_INSNS (3), /* fp */
509 COSTS_N_INSNS (4), /* dmul */
510 COSTS_N_INSNS (18), /* sdiv */
511 COSTS_N_INSNS (33), /* ddiv */
0d158b6e 512 32, /* cache line size */
5f732aba
DE
513 8, /* l1 cache */
514 64, /* l2 cache */
0b11da67 515 1, /* streams */
8b897cfa
RS
516};
517
518/* Instruction costs on PPC604 processors. */
519static const
520struct processor_costs ppc604_cost = {
06a67bdd
RS
521 COSTS_N_INSNS (4), /* mulsi */
522 COSTS_N_INSNS (4), /* mulsi_const */
523 COSTS_N_INSNS (4), /* mulsi_const9 */
524 COSTS_N_INSNS (4), /* muldi */
525 COSTS_N_INSNS (20), /* divsi */
526 COSTS_N_INSNS (20), /* divdi */
527 COSTS_N_INSNS (3), /* fp */
528 COSTS_N_INSNS (3), /* dmul */
529 COSTS_N_INSNS (18), /* sdiv */
530 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 531 32, /* cache line size */
5f732aba
DE
532 16, /* l1 cache */
533 512, /* l2 cache */
0b11da67 534 1, /* streams */
8b897cfa
RS
535};
536
537/* Instruction costs on PPC604e processors. */
538static const
539struct processor_costs ppc604e_cost = {
06a67bdd
RS
540 COSTS_N_INSNS (2), /* mulsi */
541 COSTS_N_INSNS (2), /* mulsi_const */
542 COSTS_N_INSNS (2), /* mulsi_const9 */
543 COSTS_N_INSNS (2), /* muldi */
544 COSTS_N_INSNS (20), /* divsi */
545 COSTS_N_INSNS (20), /* divdi */
546 COSTS_N_INSNS (3), /* fp */
547 COSTS_N_INSNS (3), /* dmul */
548 COSTS_N_INSNS (18), /* sdiv */
549 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 550 32, /* cache line size */
5f732aba
DE
551 32, /* l1 cache */
552 1024, /* l2 cache */
0b11da67 553 1, /* streams */
8b897cfa
RS
554};
555
f0517163 556/* Instruction costs on PPC620 processors. */
8b897cfa
RS
557static const
558struct processor_costs ppc620_cost = {
06a67bdd
RS
559 COSTS_N_INSNS (5), /* mulsi */
560 COSTS_N_INSNS (4), /* mulsi_const */
561 COSTS_N_INSNS (3), /* mulsi_const9 */
562 COSTS_N_INSNS (7), /* muldi */
563 COSTS_N_INSNS (21), /* divsi */
564 COSTS_N_INSNS (37), /* divdi */
565 COSTS_N_INSNS (3), /* fp */
566 COSTS_N_INSNS (3), /* dmul */
567 COSTS_N_INSNS (18), /* sdiv */
568 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 569 128, /* cache line size */
5f732aba
DE
570 32, /* l1 cache */
571 1024, /* l2 cache */
0b11da67 572 1, /* streams */
f0517163
RS
573};
574
575/* Instruction costs on PPC630 processors. */
576static const
577struct processor_costs ppc630_cost = {
06a67bdd
RS
578 COSTS_N_INSNS (5), /* mulsi */
579 COSTS_N_INSNS (4), /* mulsi_const */
580 COSTS_N_INSNS (3), /* mulsi_const9 */
581 COSTS_N_INSNS (7), /* muldi */
582 COSTS_N_INSNS (21), /* divsi */
583 COSTS_N_INSNS (37), /* divdi */
584 COSTS_N_INSNS (3), /* fp */
585 COSTS_N_INSNS (3), /* dmul */
586 COSTS_N_INSNS (17), /* sdiv */
587 COSTS_N_INSNS (21), /* ddiv */
0d158b6e 588 128, /* cache line size */
5f732aba
DE
589 64, /* l1 cache */
590 1024, /* l2 cache */
0b11da67 591 1, /* streams */
8b897cfa
RS
592};
593
d296e02e
AP
594/* Instruction costs on Cell processor. */
595/* COSTS_N_INSNS (1) ~ one add. */
596static const
597struct processor_costs ppccell_cost = {
598 COSTS_N_INSNS (9/2)+2, /* mulsi */
599 COSTS_N_INSNS (6/2), /* mulsi_const */
600 COSTS_N_INSNS (6/2), /* mulsi_const9 */
601 COSTS_N_INSNS (15/2)+2, /* muldi */
602 COSTS_N_INSNS (38/2), /* divsi */
603 COSTS_N_INSNS (70/2), /* divdi */
604 COSTS_N_INSNS (10/2), /* fp */
605 COSTS_N_INSNS (10/2), /* dmul */
606 COSTS_N_INSNS (74/2), /* sdiv */
607 COSTS_N_INSNS (74/2), /* ddiv */
0d158b6e 608 128, /* cache line size */
5f732aba
DE
609 32, /* l1 cache */
610 512, /* l2 cache */
611 6, /* streams */
d296e02e
AP
612};
613
8b897cfa
RS
614/* Instruction costs on PPC750 and PPC7400 processors. */
615static const
616struct processor_costs ppc750_cost = {
06a67bdd
RS
617 COSTS_N_INSNS (5), /* mulsi */
618 COSTS_N_INSNS (3), /* mulsi_const */
619 COSTS_N_INSNS (2), /* mulsi_const9 */
620 COSTS_N_INSNS (5), /* muldi */
621 COSTS_N_INSNS (17), /* divsi */
622 COSTS_N_INSNS (17), /* divdi */
623 COSTS_N_INSNS (3), /* fp */
624 COSTS_N_INSNS (3), /* dmul */
625 COSTS_N_INSNS (17), /* sdiv */
626 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 627 32, /* cache line size */
5f732aba
DE
628 32, /* l1 cache */
629 512, /* l2 cache */
0b11da67 630 1, /* streams */
8b897cfa
RS
631};
632
633/* Instruction costs on PPC7450 processors. */
634static const
635struct processor_costs ppc7450_cost = {
06a67bdd
RS
636 COSTS_N_INSNS (4), /* mulsi */
637 COSTS_N_INSNS (3), /* mulsi_const */
638 COSTS_N_INSNS (3), /* mulsi_const9 */
639 COSTS_N_INSNS (4), /* muldi */
640 COSTS_N_INSNS (23), /* divsi */
641 COSTS_N_INSNS (23), /* divdi */
642 COSTS_N_INSNS (5), /* fp */
643 COSTS_N_INSNS (5), /* dmul */
644 COSTS_N_INSNS (21), /* sdiv */
645 COSTS_N_INSNS (35), /* ddiv */
0d158b6e 646 32, /* cache line size */
5f732aba
DE
647 32, /* l1 cache */
648 1024, /* l2 cache */
0b11da67 649 1, /* streams */
8b897cfa 650};
a3170dc6 651
8b897cfa
RS
652/* Instruction costs on PPC8540 processors. */
653static const
654struct processor_costs ppc8540_cost = {
06a67bdd
RS
655 COSTS_N_INSNS (4), /* mulsi */
656 COSTS_N_INSNS (4), /* mulsi_const */
657 COSTS_N_INSNS (4), /* mulsi_const9 */
658 COSTS_N_INSNS (4), /* muldi */
659 COSTS_N_INSNS (19), /* divsi */
660 COSTS_N_INSNS (19), /* divdi */
661 COSTS_N_INSNS (4), /* fp */
662 COSTS_N_INSNS (4), /* dmul */
663 COSTS_N_INSNS (29), /* sdiv */
664 COSTS_N_INSNS (29), /* ddiv */
0d158b6e 665 32, /* cache line size */
5f732aba
DE
666 32, /* l1 cache */
667 256, /* l2 cache */
 0b11da67 668 1, /* prefetch streams */
8b897cfa
RS
669};
670
fa41c305
EW
671/* Instruction costs on E300C2 and E300C3 cores. */
672static const
673struct processor_costs ppce300c2c3_cost = {
674 COSTS_N_INSNS (4), /* mulsi */
675 COSTS_N_INSNS (4), /* mulsi_const */
676 COSTS_N_INSNS (4), /* mulsi_const9 */
677 COSTS_N_INSNS (4), /* muldi */
678 COSTS_N_INSNS (19), /* divsi */
679 COSTS_N_INSNS (19), /* divdi */
680 COSTS_N_INSNS (3), /* fp */
681 COSTS_N_INSNS (4), /* dmul */
682 COSTS_N_INSNS (18), /* sdiv */
683 COSTS_N_INSNS (33), /* ddiv */
 642639ce 684 32, /* cache line size */
a19b7d46
EW
685 16, /* l1 cache */
686 16, /* l2 cache */
 642639ce 687 1, /* prefetch streams */
fa41c305
EW
688};
689
edae5fe3
DE
690/* Instruction costs on PPCE500MC processors. */
691static const
692struct processor_costs ppce500mc_cost = {
693 COSTS_N_INSNS (4), /* mulsi */
694 COSTS_N_INSNS (4), /* mulsi_const */
695 COSTS_N_INSNS (4), /* mulsi_const9 */
696 COSTS_N_INSNS (4), /* muldi */
697 COSTS_N_INSNS (14), /* divsi */
698 COSTS_N_INSNS (14), /* divdi */
699 COSTS_N_INSNS (8), /* fp */
700 COSTS_N_INSNS (10), /* dmul */
701 COSTS_N_INSNS (36), /* sdiv */
702 COSTS_N_INSNS (66), /* ddiv */
703 64, /* cache line size */
704 32, /* l1 cache */
705 128, /* l2 cache */
 706 1, /* prefetch streams */
707};
708
8b897cfa
RS
709/* Instruction costs on POWER4 and POWER5 processors. */
710static const
711struct processor_costs power4_cost = {
06a67bdd
RS
712 COSTS_N_INSNS (3), /* mulsi */
713 COSTS_N_INSNS (2), /* mulsi_const */
714 COSTS_N_INSNS (2), /* mulsi_const9 */
715 COSTS_N_INSNS (4), /* muldi */
716 COSTS_N_INSNS (18), /* divsi */
717 COSTS_N_INSNS (34), /* divdi */
718 COSTS_N_INSNS (3), /* fp */
719 COSTS_N_INSNS (3), /* dmul */
720 COSTS_N_INSNS (17), /* sdiv */
721 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 722 128, /* cache line size */
5f732aba
DE
723 32, /* l1 cache */
724 1024, /* l2 cache */
 0b11da67 725 8, /* prefetch streams */
8b897cfa
RS
726};
727
44cd321e
PS
728/* Instruction costs on POWER6 processors. */
729static const
730struct processor_costs power6_cost = {
731 COSTS_N_INSNS (8), /* mulsi */
732 COSTS_N_INSNS (8), /* mulsi_const */
733 COSTS_N_INSNS (8), /* mulsi_const9 */
734 COSTS_N_INSNS (8), /* muldi */
735 COSTS_N_INSNS (22), /* divsi */
736 COSTS_N_INSNS (28), /* divdi */
737 COSTS_N_INSNS (3), /* fp */
738 COSTS_N_INSNS (3), /* dmul */
739 COSTS_N_INSNS (13), /* sdiv */
740 COSTS_N_INSNS (16), /* ddiv */
0d158b6e 741 128, /* cache line size */
5f732aba
DE
742 64, /* l1 cache */
743 2048, /* l2 cache */
0b11da67 744 16, /* prefetch streams */
44cd321e
PS
745};
746
8b897cfa 747\f
a2369ed3 748static bool rs6000_function_ok_for_sibcall (tree, tree);
3101faab 749static const char *rs6000_invalid_within_doloop (const_rtx);
c6c3dba9 750static bool rs6000_legitimate_address_p (enum machine_mode, rtx, bool);
f90b7a5a 751static rtx rs6000_generate_compare (rtx, enum machine_mode);
a2369ed3
DJ
752static void rs6000_emit_stack_tie (void);
753static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
a2369ed3 754static bool spe_func_has_64bit_regs_p (void);
b20a9cca 755static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
d1d0c603 756 int, HOST_WIDE_INT);
a2369ed3 757static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
f78c3290 758static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int, int);
a2369ed3
DJ
759static unsigned rs6000_hash_constant (rtx);
760static unsigned toc_hash_function (const void *);
761static int toc_hash_eq (const void *, const void *);
a2369ed3 762static bool constant_pool_expr_p (rtx);
d04b6e6e 763static bool legitimate_small_data_p (enum machine_mode, rtx);
a2369ed3
DJ
764static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
765static struct machine_function * rs6000_init_machine_status (void);
766static bool rs6000_assemble_integer (rtx, unsigned int, int);
f78c3290 767static bool no_global_regs_above (int, bool);
5add3202 768#ifdef HAVE_GAS_HIDDEN
a2369ed3 769static void rs6000_assemble_visibility (tree, int);
5add3202 770#endif
a2369ed3
DJ
771static int rs6000_ra_ever_killed (void);
772static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
8bb418a3 773static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
3101faab 774static bool rs6000_ms_bitfield_layout_p (const_tree);
77ccdfed 775static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
76d2b81d 776static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
3101faab 777static const char *rs6000_mangle_type (const_tree);
6bc7bc14 778EXPORTED_CONST struct attribute_spec rs6000_attribute_table[];
a2369ed3 779static void rs6000_set_default_type_attributes (tree);
f78c3290 780static rtx rs6000_savres_routine_sym (rs6000_stack_t *, bool, bool, bool);
ff35822b 781static rtx rs6000_emit_stack_reset (rs6000_stack_t *, rtx, rtx, int, bool);
f78c3290
NF
782static rtx rs6000_make_savres_rtx (rs6000_stack_t *, rtx, int,
783 enum machine_mode, bool, bool, bool);
52ff33d0 784static bool rs6000_reg_live_or_pic_offset_p (int);
f78c3290
NF
785static int rs6000_savres_strategy (rs6000_stack_t *, bool, int, int);
786static void rs6000_restore_saved_cr (rtx, int);
a2369ed3
DJ
787static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
788static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
b20a9cca
AM
789static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
790 tree);
a2369ed3 791static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
586de218 792static bool rs6000_return_in_memory (const_tree, const_tree);
a2369ed3 793static void rs6000_file_start (void);
7c262518 794#if TARGET_ELF
9b580a0b 795static int rs6000_elf_reloc_rw_mask (void);
a2369ed3
DJ
796static void rs6000_elf_asm_out_constructor (rtx, int);
797static void rs6000_elf_asm_out_destructor (rtx, int);
1334b570 798static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
d6b5193b 799static void rs6000_elf_asm_init_sections (void);
d6b5193b
RS
800static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
801 unsigned HOST_WIDE_INT);
a56d7372 802static void rs6000_elf_encode_section_info (tree, rtx, int)
0e5dbd9b 803 ATTRIBUTE_UNUSED;
7c262518 804#endif
3101faab 805static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
e41b2a33
PB
806static void rs6000_alloc_sdmode_stack_slot (void);
807static void rs6000_instantiate_decls (void);
cbaaba19 808#if TARGET_XCOFF
0d5817b2 809static void rs6000_xcoff_asm_output_anchor (rtx);
a2369ed3 810static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
d6b5193b 811static void rs6000_xcoff_asm_init_sections (void);
9b580a0b 812static int rs6000_xcoff_reloc_rw_mask (void);
8210e4c4 813static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
d6b5193b 814static section *rs6000_xcoff_select_section (tree, int,
b20a9cca 815 unsigned HOST_WIDE_INT);
d6b5193b
RS
816static void rs6000_xcoff_unique_section (tree, int);
817static section *rs6000_xcoff_select_rtx_section
818 (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
a2369ed3
DJ
819static const char * rs6000_xcoff_strip_name_encoding (const char *);
820static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
821static void rs6000_xcoff_file_start (void);
822static void rs6000_xcoff_file_end (void);
f1384257 823#endif
a2369ed3 824static int rs6000_variable_issue (FILE *, int, rtx, int);
f40751dd 825static bool rs6000_rtx_costs (rtx, int, int, int *, bool);
a2369ed3 826static int rs6000_adjust_cost (rtx, rtx, rtx, int);
44cd321e 827static void rs6000_sched_init (FILE *, int, int);
cbe26ab8 828static bool is_microcoded_insn (rtx);
d296e02e 829static bool is_nonpipeline_insn (rtx);
cbe26ab8
DN
830static bool is_cracked_insn (rtx);
831static bool is_branch_slot_insn (rtx);
44cd321e 832static bool is_load_insn (rtx);
e3a0e200 833static rtx get_store_dest (rtx pat);
44cd321e
PS
834static bool is_store_insn (rtx);
835static bool set_to_load_agen (rtx,rtx);
982afe02 836static bool adjacent_mem_locations (rtx,rtx);
a2369ed3
DJ
837static int rs6000_adjust_priority (rtx, int);
838static int rs6000_issue_rate (void);
b198261f 839static bool rs6000_is_costly_dependence (dep_t, int, int);
cbe26ab8
DN
840static rtx get_next_active_insn (rtx, rtx);
841static bool insn_terminates_group_p (rtx , enum group_termination);
44cd321e
PS
842static bool insn_must_be_first_in_group (rtx);
843static bool insn_must_be_last_in_group (rtx);
cbe26ab8
DN
844static bool is_costly_group (rtx *, rtx);
845static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
846static int redefine_groups (FILE *, int, rtx, rtx);
847static int pad_groups (FILE *, int, rtx, rtx);
848static void rs6000_sched_finish (FILE *, int);
44cd321e
PS
849static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
850static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
a2369ed3 851static int rs6000_use_sched_lookahead (void);
d296e02e 852static int rs6000_use_sched_lookahead_guard (rtx);
e855c69d
AB
853static void * rs6000_alloc_sched_context (void);
854static void rs6000_init_sched_context (void *, bool);
855static void rs6000_set_sched_context (void *);
856static void rs6000_free_sched_context (void *);
9c78b944 857static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
7ccf35ed 858static tree rs6000_builtin_mask_for_load (void);
89d67cca
DN
859static tree rs6000_builtin_mul_widen_even (tree);
860static tree rs6000_builtin_mul_widen_odd (tree);
744aa42f 861static tree rs6000_builtin_conversion (unsigned int, tree);
0fca40f5 862static tree rs6000_builtin_vec_perm (tree, tree *);
a2369ed3 863
58646b77 864static void def_builtin (int, const char *, tree, int);
3101faab 865static bool rs6000_vector_alignment_reachable (const_tree, bool);
a2369ed3
DJ
866static void rs6000_init_builtins (void);
867static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
868static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
869static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
870static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
871static void altivec_init_builtins (void);
872static void rs6000_common_init_builtins (void);
c15c90bb 873static void rs6000_init_libfuncs (void);
a2369ed3 874
96038623
DE
875static void paired_init_builtins (void);
876static rtx paired_expand_builtin (tree, rtx, bool *);
877static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
878static rtx paired_expand_stv_builtin (enum insn_code, tree);
879static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);
880
b20a9cca
AM
881static void enable_mask_for_builtins (struct builtin_description *, int,
882 enum rs6000_builtins,
883 enum rs6000_builtins);
a2369ed3
DJ
884static void spe_init_builtins (void);
885static rtx spe_expand_builtin (tree, rtx, bool *);
61bea3b0 886static rtx spe_expand_stv_builtin (enum insn_code, tree);
a2369ed3
DJ
887static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
888static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
889static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
d1d0c603
JJ
890static rs6000_stack_t *rs6000_stack_info (void);
891static void debug_stack_info (rs6000_stack_t *);
a2369ed3
DJ
892
893static rtx altivec_expand_builtin (tree, rtx, bool *);
894static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
895static rtx altivec_expand_st_builtin (tree, rtx, bool *);
896static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
897static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
f676971a 898static rtx altivec_expand_predicate_builtin (enum insn_code,
c4ad648e 899 const char *, tree, rtx);
a2369ed3 900static rtx altivec_expand_stv_builtin (enum insn_code, tree);
7a4eca66
DE
901static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
902static rtx altivec_expand_vec_set_builtin (tree);
903static rtx altivec_expand_vec_ext_builtin (tree, rtx);
904static int get_element_number (tree, tree);
78f5898b 905static bool rs6000_handle_option (size_t, const char *, int);
a2369ed3 906static void rs6000_parse_tls_size_option (void);
5da702b1 907static void rs6000_parse_yes_no_option (const char *, const char *, int *);
a2369ed3
DJ
908static int first_altivec_reg_to_save (void);
909static unsigned int compute_vrsave_mask (void);
9390387d 910static void compute_save_world_info (rs6000_stack_t *info_ptr);
a2369ed3
DJ
911static void is_altivec_return_reg (rtx, void *);
912static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
913int easy_vector_constant (rtx, enum machine_mode);
a2369ed3 914static rtx rs6000_dwarf_register_span (rtx);
37ea0b7e 915static void rs6000_init_dwarf_reg_sizes_extra (tree);
506d7b68 916static rtx rs6000_legitimize_address (rtx, rtx, enum machine_mode);
a2369ed3 917static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
fdbe66f2 918static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
a2369ed3
DJ
919static rtx rs6000_tls_get_addr (void);
920static rtx rs6000_got_sym (void);
9390387d 921static int rs6000_tls_symbol_ref_1 (rtx *, void *);
a2369ed3
DJ
922static const char *rs6000_get_some_local_dynamic_name (void);
923static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
ded9bf77 924static rtx rs6000_complex_function_value (enum machine_mode);
b20a9cca 925static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
a2369ed3 926 enum machine_mode, tree);
0b5383eb
DJ
927static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
928 HOST_WIDE_INT);
929static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
930 tree, HOST_WIDE_INT);
931static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
932 HOST_WIDE_INT,
933 rtx[], int *);
934static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
586de218
KG
935 const_tree, HOST_WIDE_INT,
936 rtx[], int *);
937static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
ec6376ab 938static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
b1917422 939static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
c6e8c921
GK
940static void setup_incoming_varargs (CUMULATIVE_ARGS *,
941 enum machine_mode, tree,
942 int *, int);
8cd5a4e0 943static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
586de218 944 const_tree, bool);
78a52f11
RH
945static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
946 tree, bool);
3101faab 947static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
efdba735
SH
948#if TARGET_MACHO
949static void macho_branch_islands (void);
efdba735
SH
950static int no_previous_def (tree function_name);
951static tree get_prev_label (tree function_name);
c4e18b1c 952static void rs6000_darwin_file_start (void);
efdba735
SH
953#endif
954
c35d187f 955static tree rs6000_build_builtin_va_list (void);
d7bd8aeb 956static void rs6000_va_start (tree, rtx);
726a989a 957static tree rs6000_gimplify_va_arg (tree, tree, gimple_seq *, gimple_seq *);
586de218 958static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
00b79d54 959static bool rs6000_scalar_mode_supported_p (enum machine_mode);
f676971a 960static bool rs6000_vector_mode_supported_p (enum machine_mode);
94ff898d 961static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
21213b4c 962 enum machine_mode);
94ff898d 963static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
21213b4c
DP
964 enum machine_mode);
965static int get_vsel_insn (enum machine_mode);
966static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
3aebbe5f 967static tree rs6000_stack_protect_fail (void);
21213b4c
DP
968
969const int INSN_NOT_AVAILABLE = -1;
93f90be6
FJ
970static enum machine_mode rs6000_eh_return_filter_mode (void);
971
17211ab5
GK
972/* Hash table stuff for keeping track of TOC entries. */
973
d1b38208 974struct GTY(()) toc_hash_struct
17211ab5
GK
975{
976 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
977 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
978 rtx key;
979 enum machine_mode key_mode;
980 int labelno;
981};
982
983static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
c81bebd7
MM
984\f
985/* Default register names. */
986char rs6000_reg_names[][8] =
987{
802a0058
MM
988 "0", "1", "2", "3", "4", "5", "6", "7",
989 "8", "9", "10", "11", "12", "13", "14", "15",
990 "16", "17", "18", "19", "20", "21", "22", "23",
991 "24", "25", "26", "27", "28", "29", "30", "31",
992 "0", "1", "2", "3", "4", "5", "6", "7",
993 "8", "9", "10", "11", "12", "13", "14", "15",
994 "16", "17", "18", "19", "20", "21", "22", "23",
995 "24", "25", "26", "27", "28", "29", "30", "31",
996 "mq", "lr", "ctr","ap",
997 "0", "1", "2", "3", "4", "5", "6", "7",
0ac081f6
AH
998 "xer",
999 /* AltiVec registers. */
0cd5e3a1
AH
1000 "0", "1", "2", "3", "4", "5", "6", "7",
1001 "8", "9", "10", "11", "12", "13", "14", "15",
1002 "16", "17", "18", "19", "20", "21", "22", "23",
1003 "24", "25", "26", "27", "28", "29", "30", "31",
59a4c851
AH
1004 "vrsave", "vscr",
1005 /* SPE registers. */
7d5175e1
JJ
1006 "spe_acc", "spefscr",
1007 /* Soft frame pointer. */
1008 "sfp"
c81bebd7
MM
1009};
1010
1011#ifdef TARGET_REGNAMES
8b60264b 1012static const char alt_reg_names[][8] =
c81bebd7 1013{
802a0058
MM
1014 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
1015 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
1016 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
1017 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
1018 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
1019 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
1020 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
1021 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
1022 "mq", "lr", "ctr", "ap",
1023 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
0ac081f6 1024 "xer",
59a4c851 1025 /* AltiVec registers. */
0ac081f6 1026 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
59a4c851
AH
1027 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
1028 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
1029 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
1030 "vrsave", "vscr",
1031 /* SPE registers. */
7d5175e1
JJ
1032 "spe_acc", "spefscr",
1033 /* Soft frame pointer. */
1034 "sfp"
c81bebd7
MM
1035};
1036#endif
9878760c 1037\f
daf11973
MM
1038#ifndef MASK_STRICT_ALIGN
1039#define MASK_STRICT_ALIGN 0
1040#endif
ffcfcb5f
AM
1041#ifndef TARGET_PROFILE_KERNEL
1042#define TARGET_PROFILE_KERNEL 0
1043#endif
3961e8fe
RH
1044
1045/* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
1046#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
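/* Worked example: ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO) is 0x80000000 and
   ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 31) is 0x00000001, i.e. %v0 maps
   to the most significant bit of the 32-bit VRSAVE mask and %v31 to the
   least significant bit.  */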
672a6f42
NB
1047\f
1048/* Initialize the GCC target structure. */
91d231cb
JM
1049#undef TARGET_ATTRIBUTE_TABLE
1050#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
a5c76ee6
ZW
1051#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
1052#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
daf11973 1053
301d03af
RS
1054#undef TARGET_ASM_ALIGNED_DI_OP
1055#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
1056
1057/* Default unaligned ops are only provided for ELF. Find the ops needed
1058 for non-ELF systems. */
1059#ifndef OBJECT_FORMAT_ELF
cbaaba19 1060#if TARGET_XCOFF
ae6c1efd 1061/* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
301d03af
RS
1062 64-bit targets. */
1063#undef TARGET_ASM_UNALIGNED_HI_OP
1064#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
1065#undef TARGET_ASM_UNALIGNED_SI_OP
1066#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
1067#undef TARGET_ASM_UNALIGNED_DI_OP
1068#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
1069#else
1070/* For Darwin. */
1071#undef TARGET_ASM_UNALIGNED_HI_OP
1072#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
1073#undef TARGET_ASM_UNALIGNED_SI_OP
1074#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
49bd1d27
SS
1075#undef TARGET_ASM_UNALIGNED_DI_OP
1076#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
1077#undef TARGET_ASM_ALIGNED_DI_OP
1078#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
301d03af
RS
1079#endif
1080#endif
1081
1082/* This hook deals with fixups for relocatable code and DI-mode objects
1083 in 64-bit code. */
1084#undef TARGET_ASM_INTEGER
1085#define TARGET_ASM_INTEGER rs6000_assemble_integer
1086
93638d7a
AM
1087#ifdef HAVE_GAS_HIDDEN
1088#undef TARGET_ASM_ASSEMBLE_VISIBILITY
1089#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
1090#endif
1091
c4501e62
JJ
1092#undef TARGET_HAVE_TLS
1093#define TARGET_HAVE_TLS HAVE_AS_TLS
1094
1095#undef TARGET_CANNOT_FORCE_CONST_MEM
a7e0b075 1096#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
c4501e62 1097
08c148a8
NB
1098#undef TARGET_ASM_FUNCTION_PROLOGUE
1099#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
1100#undef TARGET_ASM_FUNCTION_EPILOGUE
1101#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
1102
506d7b68
PB
1103#undef TARGET_LEGITIMIZE_ADDRESS
1104#define TARGET_LEGITIMIZE_ADDRESS rs6000_legitimize_address
1105
b54cf83a
DE
1106#undef TARGET_SCHED_VARIABLE_ISSUE
1107#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
1108
c237e94a
ZW
1109#undef TARGET_SCHED_ISSUE_RATE
1110#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
1111#undef TARGET_SCHED_ADJUST_COST
1112#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
1113#undef TARGET_SCHED_ADJUST_PRIORITY
1114#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
f676971a 1115#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
569fa502 1116#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
44cd321e
PS
1117#undef TARGET_SCHED_INIT
1118#define TARGET_SCHED_INIT rs6000_sched_init
cbe26ab8
DN
1119#undef TARGET_SCHED_FINISH
1120#define TARGET_SCHED_FINISH rs6000_sched_finish
44cd321e
PS
1121#undef TARGET_SCHED_REORDER
1122#define TARGET_SCHED_REORDER rs6000_sched_reorder
1123#undef TARGET_SCHED_REORDER2
1124#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2
c237e94a 1125
be12c2b0
VM
1126#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
1127#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
1128
d296e02e
AP
1129#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
1130#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard
1131
e855c69d
AB
1132#undef TARGET_SCHED_ALLOC_SCHED_CONTEXT
1133#define TARGET_SCHED_ALLOC_SCHED_CONTEXT rs6000_alloc_sched_context
1134#undef TARGET_SCHED_INIT_SCHED_CONTEXT
1135#define TARGET_SCHED_INIT_SCHED_CONTEXT rs6000_init_sched_context
1136#undef TARGET_SCHED_SET_SCHED_CONTEXT
1137#define TARGET_SCHED_SET_SCHED_CONTEXT rs6000_set_sched_context
1138#undef TARGET_SCHED_FREE_SCHED_CONTEXT
1139#define TARGET_SCHED_FREE_SCHED_CONTEXT rs6000_free_sched_context
1140
7ccf35ed
DN
1141#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
1142#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
89d67cca
DN
1143#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
1144#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
1145#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
1146#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
f57d17f1
TM
1147#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
1148#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
0fca40f5
IR
1149#undef TARGET_VECTORIZE_BUILTIN_VEC_PERM
1150#define TARGET_VECTORIZE_BUILTIN_VEC_PERM rs6000_builtin_vec_perm
7ccf35ed 1151
5b900a4c
DN
1152#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
1153#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable
1154
0ac081f6
AH
1155#undef TARGET_INIT_BUILTINS
1156#define TARGET_INIT_BUILTINS rs6000_init_builtins
1157
1158#undef TARGET_EXPAND_BUILTIN
1159#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
1160
608063c3
JB
1161#undef TARGET_MANGLE_TYPE
1162#define TARGET_MANGLE_TYPE rs6000_mangle_type
f18eca82 1163
c15c90bb
ZW
1164#undef TARGET_INIT_LIBFUNCS
1165#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
1166
f1384257 1167#if TARGET_MACHO
0e5dbd9b 1168#undef TARGET_BINDS_LOCAL_P
31920d83 1169#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
f1384257 1170#endif
0e5dbd9b 1171
77ccdfed
EC
1172#undef TARGET_MS_BITFIELD_LAYOUT_P
1173#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p
1174
3961e8fe
RH
1175#undef TARGET_ASM_OUTPUT_MI_THUNK
1176#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
1177
3961e8fe 1178#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3101faab 1179#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
00b960c7 1180
4977bab6
ZW
1181#undef TARGET_FUNCTION_OK_FOR_SIBCALL
1182#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
1183
2e3f0db6
DJ
1184#undef TARGET_INVALID_WITHIN_DOLOOP
1185#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
9419649c 1186
3c50106f
RH
1187#undef TARGET_RTX_COSTS
1188#define TARGET_RTX_COSTS rs6000_rtx_costs
dcefdf67 1189#undef TARGET_ADDRESS_COST
f40751dd 1190#define TARGET_ADDRESS_COST hook_int_rtx_bool_0
3c50106f 1191
96714395
AH
1192#undef TARGET_DWARF_REGISTER_SPAN
1193#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
1194
37ea0b7e
JM
1195#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
1196#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra
1197
c6e8c921
GK
1198/* On rs6000, function arguments are promoted, as are function return
1199 values. */
1200#undef TARGET_PROMOTE_FUNCTION_ARGS
586de218 1201#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
c6e8c921 1202#undef TARGET_PROMOTE_FUNCTION_RETURN
586de218 1203#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
c6e8c921 1204
c6e8c921
GK
1205#undef TARGET_RETURN_IN_MEMORY
1206#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
1207
1208#undef TARGET_SETUP_INCOMING_VARARGS
1209#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
1210
1211/* Always strict argument naming on rs6000. */
1212#undef TARGET_STRICT_ARGUMENT_NAMING
1213#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
1214#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
1215#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
42ba5130 1216#undef TARGET_SPLIT_COMPLEX_ARG
3101faab 1217#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
fe984136
RH
1218#undef TARGET_MUST_PASS_IN_STACK
1219#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
8cd5a4e0
RH
1220#undef TARGET_PASS_BY_REFERENCE
1221#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
78a52f11
RH
1222#undef TARGET_ARG_PARTIAL_BYTES
1223#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
c6e8c921 1224
c35d187f
RH
1225#undef TARGET_BUILD_BUILTIN_VA_LIST
1226#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
1227
d7bd8aeb
JJ
1228#undef TARGET_EXPAND_BUILTIN_VA_START
1229#define TARGET_EXPAND_BUILTIN_VA_START rs6000_va_start
1230
cd3ce9b4
JM
1231#undef TARGET_GIMPLIFY_VA_ARG_EXPR
1232#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
1233
93f90be6
FJ
1234#undef TARGET_EH_RETURN_FILTER_MODE
1235#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
1236
00b79d54
BE
1237#undef TARGET_SCALAR_MODE_SUPPORTED_P
1238#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p
1239
f676971a
EC
1240#undef TARGET_VECTOR_MODE_SUPPORTED_P
1241#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
1242
4d3e6fae
FJ
1243#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
1244#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
1245
78f5898b
AH
1246#undef TARGET_HANDLE_OPTION
1247#define TARGET_HANDLE_OPTION rs6000_handle_option
1248
1249#undef TARGET_DEFAULT_TARGET_FLAGS
1250#define TARGET_DEFAULT_TARGET_FLAGS \
716019c0 1251 (TARGET_DEFAULT)
78f5898b 1252
3aebbe5f
JJ
1253#undef TARGET_STACK_PROTECT_FAIL
1254#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
1255
445cf5eb
JM
1256/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
1257 The PowerPC architecture requires only weak consistency among
1258 processors--that is, memory accesses between processors need not be
1259 sequentially consistent and memory accesses among processors can occur
1260 in any order. The ability to order memory accesses weakly provides
1261 opportunities for more efficient use of the system bus. Unless a
1262 dependency exists, the 604e allows read operations to precede store
1263 operations. */
1264#undef TARGET_RELAXED_ORDERING
1265#define TARGET_RELAXED_ORDERING true
1266
fdbe66f2
EB
1267#ifdef HAVE_AS_TLS
1268#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1269#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1270#endif
1271
aacd3885
RS
1272/* Use a 32-bit anchor range. This leads to sequences like:
1273
1274 addis tmp,anchor,high
1275 add dest,tmp,low
1276
1277 where tmp itself acts as an anchor, and can be shared between
1278 accesses to the same 64k page. */
1279#undef TARGET_MIN_ANCHOR_OFFSET
1280#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
1281#undef TARGET_MAX_ANCHOR_OFFSET
1282#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
1283#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1284#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p
1285
9c78b944
DE
1286#undef TARGET_BUILTIN_RECIPROCAL
1287#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal
1288
e41b2a33
PB
1289#undef TARGET_EXPAND_TO_RTL_HOOK
1290#define TARGET_EXPAND_TO_RTL_HOOK rs6000_alloc_sdmode_stack_slot
1291
1292#undef TARGET_INSTANTIATE_DECLS
1293#define TARGET_INSTANTIATE_DECLS rs6000_instantiate_decls
1294
c6c3dba9
PB
1295#undef TARGET_LEGITIMATE_ADDRESS_P
1296#define TARGET_LEGITIMATE_ADDRESS_P rs6000_legitimate_address_p
1297
f6897b10 1298struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 1299\f
0d1fbc8c
AH
1300
1301/* Value is 1 if hard register REGNO can hold a value of machine-mode
1302 MODE. */
1303static int
1304rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1305{
1306 /* The GPRs can hold any mode, but values bigger than one register
1307 cannot go past R31. */
1308 if (INT_REGNO_P (regno))
1309 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1310
a5a97921 1311 /* The float registers can only hold floating modes and DImode.
7393f7f8 1312 This excludes the 32-bit decimal float mode for now. */
0d1fbc8c
AH
1313 if (FP_REGNO_P (regno))
1314 return
96038623 1315 ((SCALAR_FLOAT_MODE_P (mode)
c092b045 1316 && (mode != TDmode || (regno % 2) == 0)
0d1fbc8c
AH
1317 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1318 || (GET_MODE_CLASS (mode) == MODE_INT
96038623
DE
1319 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
1320 || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
1321 && PAIRED_VECTOR_MODE (mode)));
0d1fbc8c
AH
1322
1323 /* The CR register can only hold CC modes. */
1324 if (CR_REGNO_P (regno))
1325 return GET_MODE_CLASS (mode) == MODE_CC;
1326
1327 if (XER_REGNO_P (regno))
1328 return mode == PSImode;
1329
 1330 /* AltiVec only in AltiVec registers. */
1331 if (ALTIVEC_REGNO_P (regno))
1332 return ALTIVEC_VECTOR_MODE (mode);
1333
1334 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1335 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1336 return 1;
1337
 1338 /* We cannot put TImode anywhere except general registers and it must be
1339 able to fit within the register set. */
1340
1341 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1342}
1343
1344/* Initialize rs6000_hard_regno_mode_ok_p table. */
1345static void
1346rs6000_init_hard_regno_mode_ok (void)
1347{
1348 int r, m;
1349
1350 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1351 for (m = 0; m < NUM_MACHINE_MODES; ++m)
bbbbb16a 1352 if (rs6000_hard_regno_mode_ok (r, (enum machine_mode) m))
0d1fbc8c
AH
1353 rs6000_hard_regno_mode_ok_p[m][r] = true;
1354}
1355
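/* The table built above is what later queries consult; the target macro
   HARD_REGNO_MODE_OK in rs6000.h is expected to reduce to something like

     rs6000_hard_regno_mode_ok_p[(int) (MODE)][(REGNO)]

   so the relatively expensive checks in rs6000_hard_regno_mode_ok are run
   only once, at initialization.  */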
e4cad568
GK
1356#if TARGET_MACHO
1357/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1358
1359static void
1360darwin_rs6000_override_options (void)
1361{
1362 /* The Darwin ABI always includes AltiVec; it can't be (validly) turned
1363 off. */
1364 rs6000_altivec_abi = 1;
1365 TARGET_ALTIVEC_VRSAVE = 1;
1366 if (DEFAULT_ABI == ABI_DARWIN)
1367 {
1368 if (MACHO_DYNAMIC_NO_PIC_P)
1369 {
1370 if (flag_pic)
1371 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1372 flag_pic = 0;
1373 }
1374 else if (flag_pic == 1)
1375 {
1376 flag_pic = 2;
1377 }
1378 }
1379 if (TARGET_64BIT && ! TARGET_POWERPC64)
1380 {
1381 target_flags |= MASK_POWERPC64;
1382 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1383 }
1384 if (flag_mkernel)
1385 {
1386 rs6000_default_long_calls = 1;
1387 target_flags |= MASK_SOFT_FLOAT;
1388 }
1389
1390 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
1391 Altivec. */
1392 if (!flag_mkernel && !flag_apple_kext
1393 && TARGET_64BIT
1394 && ! (target_flags_explicit & MASK_ALTIVEC))
1395 target_flags |= MASK_ALTIVEC;
1396
1397 /* Unless the user (not the configurer) has explicitly overridden
1398 it with -mcpu=G3 or -mno-altivec, 10.5+ targets default to the
1399 G4 unless targeting the kernel. */
1400 if (!flag_mkernel
1401 && !flag_apple_kext
1402 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1403 && ! (target_flags_explicit & MASK_ALTIVEC)
1404 && ! rs6000_select[1].string)
1405 {
1406 target_flags |= MASK_ALTIVEC;
1407 }
1408}
1409#endif
1410
c1e55850
GK
1411/* If not otherwise specified by a target, make 'long double' equivalent to
1412 'double'. */
1413
1414#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1415#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1416#endif
1417
5248c961
RK
1418/* Override command line options. Mostly we process the processor
1419 type and sometimes adjust other TARGET_ options. */
1420
1421void
d779d0dc 1422rs6000_override_options (const char *default_cpu)
5248c961 1423{
c4d38ccb 1424 size_t i, j;
8e3f41e7 1425 struct rs6000_cpu_select *ptr;
66188a7e 1426 int set_masks;
5248c961 1427
66188a7e 1428 /* Simplifications for entries below. */
85638c0d 1429
66188a7e
GK
1430 enum {
1431 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1432 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1433 };
85638c0d 1434
66188a7e
GK
1435 /* This table occasionally claims that a processor does not support
1436 a particular feature even though it does, but the feature is slower
1437 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1438 complete description of the processor's support.
66188a7e
GK
1439
1440 Please keep this list in order, and don't forget to update the
1441 documentation in invoke.texi when adding a new processor or
1442 flag. */
5248c961
RK
1443 static struct ptt
1444 {
8b60264b
KG
1445 const char *const name; /* Canonical processor name. */
1446 const enum processor_type processor; /* Processor type enum value. */
1447 const int target_enable; /* Target flags to enable. */
8b60264b 1448 } const processor_target_table[]
66188a7e 1449 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1450 {"403", PROCESSOR_PPC403,
66188a7e 1451 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1452 {"405", PROCESSOR_PPC405,
716019c0
JM
1453 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1454 {"405fp", PROCESSOR_PPC405,
1455 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1456 {"440", PROCESSOR_PPC440,
716019c0
JM
1457 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1458 {"440fp", PROCESSOR_PPC440,
1459 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
4adf8008
PB
1460 {"464", PROCESSOR_PPC440,
1461 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1462 {"464fp", PROCESSOR_PPC440,
1463 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1464 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1465 {"601", PROCESSOR_PPC601,
66188a7e
GK
1466 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1467 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1468 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1469 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1470 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1471 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1472 {"620", PROCESSOR_PPC620,
1473 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1474 {"630", PROCESSOR_PPC630,
1475 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1476 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1477 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1478 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1479 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1480 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1481 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1482 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1483 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1484 /* 8548 has a dummy entry for now. */
a45bce6e 1485 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
fa41c305
EW
1486 {"e300c2", PROCESSOR_PPCE300C2, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1487 {"e300c3", PROCESSOR_PPCE300C3, POWERPC_BASE_MASK},
edae5fe3 1488 {"e500mc", PROCESSOR_PPCE500MC, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
66188a7e 1489 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1490 {"970", PROCESSOR_POWER4,
66188a7e 1491 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1492 {"cell", PROCESSOR_CELL,
1493 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1494 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1495 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1496 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1497 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1498 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1499 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1500 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1501 {"power2", PROCESSOR_POWER,
1502 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1503 {"power3", PROCESSOR_PPC630,
1504 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1505 {"power4", PROCESSOR_POWER4,
9a8d7941 1506 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1bc39d2f 1507 | MASK_MFCRF},
ec507f2d 1508 {"power5", PROCESSOR_POWER5,
9a8d7941 1509 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
432218ba 1510 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7 1511 {"power5+", PROCESSOR_POWER5,
9a8d7941 1512 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
9719f3b7 1513 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1514 {"power6", PROCESSOR_POWER6,
0783d48d
DE
1515 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1516 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
44cd321e 1517 {"power6x", PROCESSOR_POWER6,
0783d48d
DE
1518 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1519 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP
1520 | MASK_MFPGPR},
d40c9e33
PB
1521 {"power7", PROCESSOR_POWER5,
1522 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_MFCRF
1523 | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
66188a7e
GK
1524 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1525 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1526 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1527 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1528 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1529 {"rios2", PROCESSOR_RIOS2,
1530 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1531 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1532 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1533 {"rs64", PROCESSOR_RS64A,
1534 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1535 };
5248c961 1536
ca7558fc 1537 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1538
66188a7e
GK
1539 /* Some OSs don't support saving the high part of 64-bit registers on
1540 context switch. Other OSs don't support saving Altivec registers.
1541 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1542 settings; if the user wants either, the user must explicitly specify
1543 them and we won't interfere with the user's specification. */
1544
1545 enum {
1546 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1547 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1548 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1549 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1550 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1551 };
0d1fbc8c 1552
c4ad648e 1553 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1554#ifdef OS_MISSING_POWERPC64
1555 if (OS_MISSING_POWERPC64)
1556 set_masks &= ~MASK_POWERPC64;
1557#endif
1558#ifdef OS_MISSING_ALTIVEC
1559 if (OS_MISSING_ALTIVEC)
1560 set_masks &= ~MASK_ALTIVEC;
1561#endif
1562
768875a8
AM
1563 /* Don't let the processor default override flags that were given explicitly. */
1564 set_masks &= ~target_flags_explicit;
957211c3 1565
a4f6c312 1566 /* Identify the processor type. */
8e3f41e7 1567 rs6000_select[0].string = default_cpu;
3cb999d8 1568 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1569
b6a1cbae 1570 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1571 {
8e3f41e7
MM
1572 ptr = &rs6000_select[i];
1573 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1574 {
8e3f41e7
MM
1575 for (j = 0; j < ptt_size; j++)
1576 if (! strcmp (ptr->string, processor_target_table[j].name))
1577 {
1578 if (ptr->set_tune_p)
1579 rs6000_cpu = processor_target_table[j].processor;
1580
1581 if (ptr->set_arch_p)
1582 {
66188a7e
GK
1583 target_flags &= ~set_masks;
1584 target_flags |= (processor_target_table[j].target_enable
1585 & set_masks);
8e3f41e7
MM
1586 }
1587 break;
1588 }
1589
4406229e 1590 if (j == ptt_size)
8e3f41e7 1591 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1592 }
1593 }
8a61d227 1594
edae5fe3
DE
1595 if ((TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
1596 && !rs6000_explicit_options.isel)
a3170dc6
AH
1597 rs6000_isel = 1;
1598
edae5fe3
DE
1599 if (rs6000_cpu == PROCESSOR_PPCE300C2 || rs6000_cpu == PROCESSOR_PPCE300C3
1600 || rs6000_cpu == PROCESSOR_PPCE500MC)
fa41c305
EW
1601 {
1602 if (TARGET_ALTIVEC)
1603 error ("AltiVec not supported in this target");
1604 if (TARGET_SPE)
1605 error ("Spe not supported in this target");
1606 }
1607
25696a75 1608 /* Disable Cell microcode if we are optimizing for the Cell
c921bad8
AP
1609 and not optimizing for size. */
1610 if (rs6000_gen_cell_microcode == -1)
1611 rs6000_gen_cell_microcode = !(rs6000_cpu == PROCESSOR_CELL
1612 && !optimize_size);
1613
dff9f1b6 1614 /* If we are optimizing big endian systems for space, use the load/store
c921bad8
AP
1615 multiple and string instructions unless we are not generating
1616 Cell microcode. */
1617 if (BYTES_BIG_ENDIAN && optimize_size && !rs6000_gen_cell_microcode)
957211c3 1618 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1619
a4f6c312
SS
1620 /* Don't allow -mmultiple or -mstring on little endian systems
1621 unless the cpu is a 750, because the hardware doesn't support the
1622 instructions used in little endian mode, and using them causes an
1623 alignment trap. The 750 does not cause an alignment trap (except
1624 when the target is unaligned). */
bef84347 1625
b21fb038 1626 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1627 {
1628 if (TARGET_MULTIPLE)
1629 {
1630 target_flags &= ~MASK_MULTIPLE;
b21fb038 1631 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1632 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1633 }
1634
1635 if (TARGET_STRING)
1636 {
1637 target_flags &= ~MASK_STRING;
b21fb038 1638 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1639 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1640 }
1641 }
3933e0e1 1642
38c1f2d7
MM
1643 /* Set debug flags */
1644 if (rs6000_debug_name)
1645 {
bfc79d3b 1646 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1647 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1648 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1649 rs6000_debug_stack = 1;
bfc79d3b 1650 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1651 rs6000_debug_arg = 1;
1652 else
c725bd79 1653 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1654 }
1655
57ac7be9
AM
1656 if (rs6000_traceback_name)
1657 {
1658 if (! strncmp (rs6000_traceback_name, "full", 4))
1659 rs6000_traceback = traceback_full;
1660 else if (! strncmp (rs6000_traceback_name, "part", 4))
1661 rs6000_traceback = traceback_part;
1662 else if (! strncmp (rs6000_traceback_name, "no", 2))
1663 rs6000_traceback = traceback_none;
1664 else
9e637a26 1665 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1666 rs6000_traceback_name);
1667 }
1668
78f5898b
AH
1669 if (!rs6000_explicit_options.long_double)
1670 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1671
602ea4d3 1672#ifndef POWERPC_LINUX
d3603e8c 1673 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1674 rs6000_ieeequad = 1;
1675#endif
1676
0db747be
DE
1677 /* Enable Altivec ABI for AIX -maltivec. */
1678 if (TARGET_XCOFF && TARGET_ALTIVEC)
1679 rs6000_altivec_abi = 1;
1680
a2db2771
JJ
1681 /* The AltiVec ABI is the default for PowerPC-64 GNU/Linux. For
1682 PowerPC-32 GNU/Linux, -maltivec implies the AltiVec ABI. It can
1683 be explicitly overridden in either case. */
1684 if (TARGET_ELF)
6d0ef01e 1685 {
a2db2771
JJ
1686 if (!rs6000_explicit_options.altivec_abi
1687 && (TARGET_64BIT || TARGET_ALTIVEC))
1688 rs6000_altivec_abi = 1;
1689
1690 /* Enable VRSAVE for AltiVec ABI, unless explicitly overridden. */
1691 if (!rs6000_explicit_options.vrsave)
1692 TARGET_ALTIVEC_VRSAVE = rs6000_altivec_abi;
6d0ef01e
HP
1693 }
1694
594a51fe
SS
1695 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1696 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1697 {
1698 rs6000_darwin64_abi = 1;
9c7956fd 1699#if TARGET_MACHO
6ac49599 1700 darwin_one_byte_bool = 1;
9c7956fd 1701#endif
d9168963
SS
1702 /* Default to natural alignment, for better performance. */
1703 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1704 }
1705
194c524a
DE
1706 /* Place FP constants in the constant pool instead of the TOC
1707 if section anchors are enabled. */
1708 if (flag_section_anchors)
1709 TARGET_NO_FP_IN_TOC = 1;
1710
c4501e62
JJ
1711 /* Handle -mtls-size option. */
1712 rs6000_parse_tls_size_option ();
1713
a7ae18e2
AH
1714#ifdef SUBTARGET_OVERRIDE_OPTIONS
1715 SUBTARGET_OVERRIDE_OPTIONS;
1716#endif
1717#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1718 SUBSUBTARGET_OVERRIDE_OPTIONS;
1719#endif
4d4cbc0e
AH
1720#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1721 SUB3TARGET_OVERRIDE_OPTIONS;
1722#endif
a7ae18e2 1723
edae5fe3 1724 if (TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
5da702b1 1725 {
edae5fe3 1726 /* The e500 and e500mc do not have string instructions, and we set
5da702b1
AH
1727 MASK_STRING above when optimizing for size. */
1728 if ((target_flags & MASK_STRING) != 0)
1729 target_flags = target_flags & ~MASK_STRING;
1730 }
1731 else if (rs6000_select[1].string != NULL)
1732 {
1733 /* For the powerpc-eabispe configuration, we set all these by
1734 default, so let's unset them if we manually set another
1735 CPU that is not the E500. */
a2db2771 1736 if (!rs6000_explicit_options.spe_abi)
5da702b1 1737 rs6000_spe_abi = 0;
78f5898b 1738 if (!rs6000_explicit_options.spe)
5da702b1 1739 rs6000_spe = 0;
78f5898b 1740 if (!rs6000_explicit_options.float_gprs)
5da702b1 1741 rs6000_float_gprs = 0;
78f5898b 1742 if (!rs6000_explicit_options.isel)
5da702b1
AH
1743 rs6000_isel = 0;
1744 }
b5044283 1745
eca0d5e8
JM
1746 /* Detect invalid option combinations with E500. */
1747 CHECK_E500_OPTIONS;
1748
ec507f2d 1749 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1750 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1751 && rs6000_cpu != PROCESSOR_POWER6
1752 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1753 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1754 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1755 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1756 || rs6000_cpu == PROCESSOR_POWER5
1757 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1758
ec507f2d
DE
1759 rs6000_sched_restricted_insns_priority
1760 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1761
569fa502 1762 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1763 rs6000_sched_costly_dep
1764 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1765
569fa502
DN
1766 if (rs6000_sched_costly_dep_str)
1767 {
f676971a 1768 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1769 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1770 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1771 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1772 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1773 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1774 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1775 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1776 else
32e8bb8e
ILT
1777 rs6000_sched_costly_dep = ((enum rs6000_dependence_cost)
1778 atoi (rs6000_sched_costly_dep_str));
cbe26ab8
DN
1779 }
1780
1781 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1782 rs6000_sched_insert_nops
1783 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1784
cbe26ab8
DN
1785 if (rs6000_sched_insert_nops_str)
1786 {
1787 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1788 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1789 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1790 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1791 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1792 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1793 else
32e8bb8e
ILT
1794 rs6000_sched_insert_nops = ((enum rs6000_nop_insertion)
1795 atoi (rs6000_sched_insert_nops_str));
569fa502
DN
1796 }
1797
c81bebd7 1798#ifdef TARGET_REGNAMES
a4f6c312
SS
1799 /* If the user desires alternate register names, copy in the
1800 alternate names now. */
c81bebd7 1801 if (TARGET_REGNAMES)
4e135bdd 1802 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1803#endif
1804
df01da37 1805 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1806 If -maix-struct-return or -msvr4-struct-return was explicitly
1807 used, don't override with the ABI default. */
df01da37
DE
1808 if (!rs6000_explicit_options.aix_struct_ret)
1809 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1810
602ea4d3 1811 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1812 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1813
f676971a 1814 if (TARGET_TOC)
9ebbca7d 1815 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1816
301d03af
RS
1817 /* We can only guarantee the availability of DI pseudo-ops when
1818 assembling for 64-bit targets. */
ae6c1efd 1819 if (!TARGET_64BIT)
301d03af
RS
1820 {
1821 targetm.asm_out.aligned_op.di = NULL;
1822 targetm.asm_out.unaligned_op.di = NULL;
1823 }
1824
1494c534
DE
1825 /* Set branch target alignment, if not optimizing for size. */
1826 if (!optimize_size)
1827 {
d296e02e
AP
1828 /* The Cell wants branch targets aligned to 8 bytes for dual issue. */
1829 if (rs6000_cpu == PROCESSOR_CELL)
1830 {
1831 if (align_functions <= 0)
1832 align_functions = 8;
1833 if (align_jumps <= 0)
1834 align_jumps = 8;
1835 if (align_loops <= 0)
1836 align_loops = 8;
1837 }
44cd321e 1838 if (rs6000_align_branch_targets)
1494c534
DE
1839 {
1840 if (align_functions <= 0)
1841 align_functions = 16;
1842 if (align_jumps <= 0)
1843 align_jumps = 16;
1844 if (align_loops <= 0)
1845 align_loops = 16;
1846 }
1847 if (align_jumps_max_skip <= 0)
1848 align_jumps_max_skip = 15;
1849 if (align_loops_max_skip <= 0)
1850 align_loops_max_skip = 15;
1851 }
2792d578 1852
71f123ca
FS
1853 /* Arrange to save and restore machine status around nested functions. */
1854 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1855
1856 /* We should always be splitting complex arguments, but we can't break
1857 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1858 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1859 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1860
1861 /* Initialize rs6000_cost with the appropriate target costs. */
1862 if (optimize_size)
1863 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1864 else
1865 switch (rs6000_cpu)
1866 {
1867 case PROCESSOR_RIOS1:
1868 rs6000_cost = &rios1_cost;
1869 break;
1870
1871 case PROCESSOR_RIOS2:
1872 rs6000_cost = &rios2_cost;
1873 break;
1874
1875 case PROCESSOR_RS64A:
1876 rs6000_cost = &rs64a_cost;
1877 break;
1878
1879 case PROCESSOR_MPCCORE:
1880 rs6000_cost = &mpccore_cost;
1881 break;
1882
1883 case PROCESSOR_PPC403:
1884 rs6000_cost = &ppc403_cost;
1885 break;
1886
1887 case PROCESSOR_PPC405:
1888 rs6000_cost = &ppc405_cost;
1889 break;
1890
1891 case PROCESSOR_PPC440:
1892 rs6000_cost = &ppc440_cost;
1893 break;
1894
1895 case PROCESSOR_PPC601:
1896 rs6000_cost = &ppc601_cost;
1897 break;
1898
1899 case PROCESSOR_PPC603:
1900 rs6000_cost = &ppc603_cost;
1901 break;
1902
1903 case PROCESSOR_PPC604:
1904 rs6000_cost = &ppc604_cost;
1905 break;
1906
1907 case PROCESSOR_PPC604e:
1908 rs6000_cost = &ppc604e_cost;
1909 break;
1910
1911 case PROCESSOR_PPC620:
8b897cfa
RS
1912 rs6000_cost = &ppc620_cost;
1913 break;
1914
f0517163
RS
1915 case PROCESSOR_PPC630:
1916 rs6000_cost = &ppc630_cost;
1917 break;
1918
982afe02 1919 case PROCESSOR_CELL:
d296e02e
AP
1920 rs6000_cost = &ppccell_cost;
1921 break;
1922
8b897cfa
RS
1923 case PROCESSOR_PPC750:
1924 case PROCESSOR_PPC7400:
1925 rs6000_cost = &ppc750_cost;
1926 break;
1927
1928 case PROCESSOR_PPC7450:
1929 rs6000_cost = &ppc7450_cost;
1930 break;
1931
1932 case PROCESSOR_PPC8540:
1933 rs6000_cost = &ppc8540_cost;
1934 break;
1935
fa41c305
EW
1936 case PROCESSOR_PPCE300C2:
1937 case PROCESSOR_PPCE300C3:
1938 rs6000_cost = &ppce300c2c3_cost;
1939 break;
1940
edae5fe3
DE
1941 case PROCESSOR_PPCE500MC:
1942 rs6000_cost = &ppce500mc_cost;
1943 break;
1944
8b897cfa
RS
1945 case PROCESSOR_POWER4:
1946 case PROCESSOR_POWER5:
1947 rs6000_cost = &power4_cost;
1948 break;
1949
44cd321e
PS
1950 case PROCESSOR_POWER6:
1951 rs6000_cost = &power6_cost;
1952 break;
1953
8b897cfa 1954 default:
37409796 1955 gcc_unreachable ();
8b897cfa 1956 }
0b11da67
DE
1957
1958 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1959 set_param_value ("simultaneous-prefetches",
1960 rs6000_cost->simultaneous_prefetches);
1961 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1962 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1963 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1964 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1965 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1966 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
d7bd8aeb
JJ
1967
1968 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
1969 can be optimized to ap = __builtin_next_arg (0). */
1970 if (DEFAULT_ABI != ABI_V4)
1971 targetm.expand_builtin_va_start = NULL;
696e45ba
ME
1972
1973 /* Set up single/double float flags.
1974 If TARGET_HARD_FLOAT is set, but neither single nor double is set,
1975 then set both flags. */
1976 if (TARGET_HARD_FLOAT && TARGET_FPRS
1977 && rs6000_single_float == 0 && rs6000_double_float == 0)
1978 rs6000_single_float = rs6000_double_float = 1;
1979
1980 /* Reset single and double FP flags if target is E500. */
1981 if (TARGET_E500)
1982 {
1983 rs6000_single_float = rs6000_double_float = 0;
1984 if (TARGET_E500_SINGLE)
1985 rs6000_single_float = 1;
1986 if (TARGET_E500_DOUBLE)
1987 rs6000_single_float = rs6000_double_float = 1;
1988 }
1989
001b9eb6
PH
1990 /* If not explicitly specified via option, decide whether to generate indexed
1991 load/store instructions. */
1992 if (TARGET_AVOID_XFORM == -1)
1993 /* Avoid indexed addressing when targeting Power6 in order to avoid
1994 the DERAT mispredict penalty. */
1995 TARGET_AVOID_XFORM = (rs6000_cpu == PROCESSOR_POWER6 && TARGET_CMPB);
1996
696e45ba 1997 rs6000_init_hard_regno_mode_ok ();
5248c961 1998}
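/* Editorial example (not original text): rs6000_select[0] carries the
   configured default CPU, rs6000_select[1] the -mcpu= string and
   rs6000_select[2] the -mtune= string.  With -mcpu=power5 the "power5"
   table entry above sets rs6000_cpu and replaces the arch bits of
   target_flags (within set_masks); with only -mtune=power6 the flags are
   left alone and just rs6000_cpu changes, affecting scheduling hints and
   the cost table chosen near the end of this function.  */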
5accd822 1999
7ccf35ed
DN
2000/* Implement targetm.vectorize.builtin_mask_for_load. */
2001static tree
2002rs6000_builtin_mask_for_load (void)
2003{
2004 if (TARGET_ALTIVEC)
2005 return altivec_builtin_mask_for_load;
2006 else
2007 return 0;
2008}
2009
7910ae0c
DN
2010/* Implement targetm.vectorize.builtin_conversion.
2011 Returns a decl of a function that implements conversion of an integer vector
2012 into a floating-point vector, or vice-versa. TYPE is the type of the integer
2013 side of the conversion.
2014 Return NULL_TREE if it is not available. */
f57d17f1 2015static tree
744aa42f 2016rs6000_builtin_conversion (unsigned int tcode, tree type)
f57d17f1 2017{
744aa42f
ILT
2018 enum tree_code code = (enum tree_code) tcode;
2019
f57d17f1
TM
2020 if (!TARGET_ALTIVEC)
2021 return NULL_TREE;
982afe02 2022
f57d17f1
TM
2023 switch (code)
2024 {
7910ae0c
DN
2025 case FIX_TRUNC_EXPR:
2026 switch (TYPE_MODE (type))
2027 {
2028 case V4SImode:
2029 return TYPE_UNSIGNED (type)
2030 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VCTUXS]
2031 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VCTSXS];
2032 default:
2033 return NULL_TREE;
2034 }
2035
f57d17f1
TM
2036 case FLOAT_EXPR:
2037 switch (TYPE_MODE (type))
2038 {
2039 case V4SImode:
7910ae0c
DN
2040 return TYPE_UNSIGNED (type)
2041 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX]
2042 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
f57d17f1
TM
2043 default:
2044 return NULL_TREE;
2045 }
7910ae0c 2046
f57d17f1
TM
2047 default:
2048 return NULL_TREE;
2049 }
2050}
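/* Editorial example (not original text): when the vectorizer converts
   "vf[i] = (float) vi[i]" over V4SImode/V4SFmode it asks for FLOAT_EXPR
   on the signed integer vector type and receives the
   ALTIVEC_BUILTIN_VCFSX decl (the vcfsx instruction); the unsigned
   variant maps to vcfux, and the FIX_TRUNC_EXPR direction to
   vctsxs/vctuxs.  */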
2051
89d67cca
DN
2052/* Implement targetm.vectorize.builtin_mul_widen_even. */
2053static tree
2054rs6000_builtin_mul_widen_even (tree type)
2055{
2056 if (!TARGET_ALTIVEC)
2057 return NULL_TREE;
2058
2059 switch (TYPE_MODE (type))
2060 {
2061 case V8HImode:
7910ae0c
DN
2062 return TYPE_UNSIGNED (type)
2063 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH]
2064 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
89d67cca
DN
2065
2066 case V16QImode:
7910ae0c
DN
2067 return TYPE_UNSIGNED (type)
2068 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB]
2069 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
89d67cca
DN
2070 default:
2071 return NULL_TREE;
2072 }
2073}
2074
2075/* Implement targetm.vectorize.builtin_mul_widen_odd. */
2076static tree
2077rs6000_builtin_mul_widen_odd (tree type)
2078{
2079 if (!TARGET_ALTIVEC)
2080 return NULL_TREE;
2081
2082 switch (TYPE_MODE (type))
2083 {
2084 case V8HImode:
7910ae0c
DN
2085 return TYPE_UNSIGNED (type)
2086 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH]
2087 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
89d67cca
DN
2088
2089 case V16QImode:
7910ae0c
DN
2090 return TYPE_UNSIGNED (type)
2091 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB]
2092 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
89d67cca
DN
2093 default:
2094 return NULL_TREE;
2095 }
2096}
2097
5b900a4c
DN
2098
2099/* Return true iff a data reference of TYPE can reach vector alignment (16)
2100 after applying some number of iterations. This routine does not determine
2101 how many iterations are required to reach the desired alignment. */
2102
2103static bool
3101faab 2104rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
2105{
2106 if (is_packed)
2107 return false;
2108
2109 if (TARGET_32BIT)
2110 {
2111 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
2112 return true;
2113
2114 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
2115 return true;
2116
2117 return false;
2118 }
2119 else
2120 {
2121 if (TARGET_MACHO)
2122 return false;
2123
2124 /* Assume that all other types are naturally aligned. CHECKME! */
2125 return true;
2126 }
2127}
2128
0fca40f5
IR
2129/* Implement targetm.vectorize.builtin_vec_perm. */
2130tree
2131rs6000_builtin_vec_perm (tree type, tree *mask_element_type)
2132{
2133 tree d;
2134
2135 *mask_element_type = unsigned_char_type_node;
2136
2137 switch (TYPE_MODE (type))
2138 {
2139 case V16QImode:
2140 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_16QI];
2141 break;
2142
2143 case V8HImode:
2144 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_8HI];
2145 break;
2146
2147 case V4SImode:
2148 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_4SI];
2149 break;
2150
2151 case V4SFmode:
2152 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_4SF];
2153 break;
2154
2155 default:
2156 return NULL_TREE;
2157 }
2158
2159 gcc_assert (d);
2160 return d;
2161}
2162
5da702b1
AH
2163/* Handle generic options of the form -mfoo=yes/no.
2164 NAME is the option name.
2165 VALUE is the option value.
2166 FLAG points to the flag in which to store 1 or 0, depending on
2167 whether the option value is 'yes' or 'no' respectively. */
993f19a8 2168static void
5da702b1 2169rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 2170{
5da702b1 2171 if (value == 0)
993f19a8 2172 return;
5da702b1
AH
2173 else if (!strcmp (value, "yes"))
2174 *flag = 1;
2175 else if (!strcmp (value, "no"))
2176 *flag = 0;
08b57fb3 2177 else
5da702b1 2178 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
2179}
2180
c4501e62
JJ
2181/* Validate and record the size specified with the -mtls-size option. */
2182
2183static void
863d938c 2184rs6000_parse_tls_size_option (void)
c4501e62
JJ
2185{
2186 if (rs6000_tls_size_string == 0)
2187 return;
2188 else if (strcmp (rs6000_tls_size_string, "16") == 0)
2189 rs6000_tls_size = 16;
2190 else if (strcmp (rs6000_tls_size_string, "32") == 0)
2191 rs6000_tls_size = 32;
2192 else if (strcmp (rs6000_tls_size_string, "64") == 0)
2193 rs6000_tls_size = 64;
2194 else
9e637a26 2195 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
2196}
2197
5accd822 2198void
a2369ed3 2199optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 2200{
2e3f0db6
DJ
2201 if (DEFAULT_ABI == ABI_DARWIN)
2202 /* The Darwin libraries never set errno, so we might as well
2203 avoid calling them when that's the only reason we would. */
2204 flag_errno_math = 0;
59d6560b
DE
2205
2206 /* Double growth factor to counter reduced min jump length. */
2207 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
2208
2209 /* Enable section anchors by default.
2210 Skip section anchors for Objective C and Objective C++
2f3b0d4a
ST
2211 until front-ends fixed. */
2212 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
d6cc6ec9 2213 flag_section_anchors = 2;
5accd822 2214}
78f5898b 2215
0bb7b92e
ME
2216static enum fpu_type_t
2217rs6000_parse_fpu_option (const char *option)
2218{
2219 if (!strcmp("none", option)) return FPU_NONE;
2220 if (!strcmp("sp_lite", option)) return FPU_SF_LITE;
2221 if (!strcmp("dp_lite", option)) return FPU_DF_LITE;
2222 if (!strcmp("sp_full", option)) return FPU_SF_FULL;
2223 if (!strcmp("dp_full", option)) return FPU_DF_FULL;
2224 error("unknown value %s for -mfpu", option);
2225 return FPU_NONE;
2226}
2227
78f5898b
AH
2228/* Implement TARGET_HANDLE_OPTION. */
2229
2230static bool
2231rs6000_handle_option (size_t code, const char *arg, int value)
2232{
0bb7b92e
ME
2233 enum fpu_type_t fpu_type = FPU_NONE;
2234
78f5898b
AH
2235 switch (code)
2236 {
2237 case OPT_mno_power:
2238 target_flags &= ~(MASK_POWER | MASK_POWER2
2239 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2240 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2241 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2242 break;
2243 case OPT_mno_powerpc:
2244 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2245 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2246 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2247 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2248 break;
2249 case OPT_mfull_toc:
d2894ab5
DE
2250 target_flags &= ~MASK_MINIMAL_TOC;
2251 TARGET_NO_FP_IN_TOC = 0;
2252 TARGET_NO_SUM_IN_TOC = 0;
2253 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2254#ifdef TARGET_USES_SYSV4_OPT
2255 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc be
2256 just the same as -mminimal-toc. */
2257 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2258 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2259#endif
2260 break;
2261
2262#ifdef TARGET_USES_SYSV4_OPT
2263 case OPT_mtoc:
2264 /* Make -mtoc behave like -mminimal-toc. */
2265 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2266 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2267 break;
2268#endif
2269
2270#ifdef TARGET_USES_AIX64_OPT
2271 case OPT_maix64:
2272#else
2273 case OPT_m64:
2274#endif
2c9c9afd
AM
2275 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2276 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2277 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2278 break;
2279
2280#ifdef TARGET_USES_AIX64_OPT
2281 case OPT_maix32:
2282#else
2283 case OPT_m32:
2284#endif
2285 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2286 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2287 break;
2288
2289 case OPT_minsert_sched_nops_:
2290 rs6000_sched_insert_nops_str = arg;
2291 break;
2292
2293 case OPT_mminimal_toc:
2294 if (value == 1)
2295 {
d2894ab5
DE
2296 TARGET_NO_FP_IN_TOC = 0;
2297 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2298 }
2299 break;
2300
2301 case OPT_mpower:
2302 if (value == 1)
c2dba4ab
AH
2303 {
2304 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2305 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2306 }
78f5898b
AH
2307 break;
2308
2309 case OPT_mpower2:
2310 if (value == 1)
c2dba4ab
AH
2311 {
2312 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2313 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2314 }
78f5898b
AH
2315 break;
2316
2317 case OPT_mpowerpc_gpopt:
2318 case OPT_mpowerpc_gfxopt:
2319 if (value == 1)
c2dba4ab
AH
2320 {
2321 target_flags |= MASK_POWERPC;
2322 target_flags_explicit |= MASK_POWERPC;
2323 }
78f5898b
AH
2324 break;
2325
df01da37
DE
2326 case OPT_maix_struct_return:
2327 case OPT_msvr4_struct_return:
2328 rs6000_explicit_options.aix_struct_ret = true;
2329 break;
2330
b5e3caf2
BE
2331 case OPT_mvrsave:
2332 rs6000_explicit_options.vrsave = true;
2333 TARGET_ALTIVEC_VRSAVE = value;
2334 break;
2335
78f5898b 2336 case OPT_mvrsave_:
a2db2771 2337 rs6000_explicit_options.vrsave = true;
78f5898b
AH
2338 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2339 break;
78f5898b 2340
94f4765c
NF
2341 case OPT_misel:
2342 rs6000_explicit_options.isel = true;
2343 rs6000_isel = value;
2344 break;
2345
78f5898b
AH
2346 case OPT_misel_:
2347 rs6000_explicit_options.isel = true;
2348 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2349 break;
2350
94f4765c
NF
2351 case OPT_mspe:
2352 rs6000_explicit_options.spe = true;
2353 rs6000_spe = value;
2354 break;
2355
78f5898b
AH
2356 case OPT_mspe_:
2357 rs6000_explicit_options.spe = true;
2358 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2359 break;
2360
2361 case OPT_mdebug_:
2362 rs6000_debug_name = arg;
2363 break;
2364
2365#ifdef TARGET_USES_SYSV4_OPT
2366 case OPT_mcall_:
2367 rs6000_abi_name = arg;
2368 break;
2369
2370 case OPT_msdata_:
2371 rs6000_sdata_name = arg;
2372 break;
2373
2374 case OPT_mtls_size_:
2375 rs6000_tls_size_string = arg;
2376 break;
2377
2378 case OPT_mrelocatable:
2379 if (value == 1)
c2dba4ab 2380 {
e0bf274f
AM
2381 target_flags |= MASK_MINIMAL_TOC;
2382 target_flags_explicit |= MASK_MINIMAL_TOC;
2383 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2384 }
78f5898b
AH
2385 break;
2386
2387 case OPT_mrelocatable_lib:
2388 if (value == 1)
c2dba4ab 2389 {
e0bf274f
AM
2390 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2391 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2392 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2393 }
78f5898b 2394 else
c2dba4ab
AH
2395 {
2396 target_flags &= ~MASK_RELOCATABLE;
2397 target_flags_explicit |= MASK_RELOCATABLE;
2398 }
78f5898b
AH
2399 break;
2400#endif
2401
2402 case OPT_mabi_:
78f5898b
AH
2403 if (!strcmp (arg, "altivec"))
2404 {
a2db2771 2405 rs6000_explicit_options.altivec_abi = true;
78f5898b 2406 rs6000_altivec_abi = 1;
a2db2771
JJ
2407
2408 /* Enabling the AltiVec ABI turns off the SPE ABI. */
78f5898b
AH
2409 rs6000_spe_abi = 0;
2410 }
2411 else if (! strcmp (arg, "no-altivec"))
d3603e8c 2412 {
a2db2771 2413 rs6000_explicit_options.altivec_abi = true;
d3603e8c
AM
2414 rs6000_altivec_abi = 0;
2415 }
78f5898b
AH
2416 else if (! strcmp (arg, "spe"))
2417 {
a2db2771 2418 rs6000_explicit_options.spe_abi = true;
78f5898b
AH
2419 rs6000_spe_abi = 1;
2420 rs6000_altivec_abi = 0;
2421 if (!TARGET_SPE_ABI)
2422 error ("not configured for ABI: '%s'", arg);
2423 }
2424 else if (! strcmp (arg, "no-spe"))
d3603e8c 2425 {
a2db2771 2426 rs6000_explicit_options.spe_abi = true;
d3603e8c
AM
2427 rs6000_spe_abi = 0;
2428 }
78f5898b
AH
2429
2430 /* These are here for testing during development only; please do
2431 not document them in the manual. */
2432 else if (! strcmp (arg, "d64"))
2433 {
2434 rs6000_darwin64_abi = 1;
2435 warning (0, "Using darwin64 ABI");
2436 }
2437 else if (! strcmp (arg, "d32"))
2438 {
2439 rs6000_darwin64_abi = 0;
2440 warning (0, "Using old darwin ABI");
2441 }
2442
602ea4d3
JJ
2443 else if (! strcmp (arg, "ibmlongdouble"))
2444 {
d3603e8c 2445 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2446 rs6000_ieeequad = 0;
2447 warning (0, "Using IBM extended precision long double");
2448 }
2449 else if (! strcmp (arg, "ieeelongdouble"))
2450 {
d3603e8c 2451 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2452 rs6000_ieeequad = 1;
2453 warning (0, "Using IEEE extended precision long double");
2454 }
2455
78f5898b
AH
2456 else
2457 {
2458 error ("unknown ABI specified: '%s'", arg);
2459 return false;
2460 }
2461 break;
2462
2463 case OPT_mcpu_:
2464 rs6000_select[1].string = arg;
2465 break;
2466
2467 case OPT_mtune_:
2468 rs6000_select[2].string = arg;
2469 break;
2470
2471 case OPT_mtraceback_:
2472 rs6000_traceback_name = arg;
2473 break;
2474
2475 case OPT_mfloat_gprs_:
2476 rs6000_explicit_options.float_gprs = true;
2477 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2478 rs6000_float_gprs = 1;
2479 else if (! strcmp (arg, "double"))
2480 rs6000_float_gprs = 2;
2481 else if (! strcmp (arg, "no"))
2482 rs6000_float_gprs = 0;
2483 else
2484 {
2485 error ("invalid option for -mfloat-gprs: '%s'", arg);
2486 return false;
2487 }
2488 break;
2489
2490 case OPT_mlong_double_:
2491 rs6000_explicit_options.long_double = true;
2492 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2493 if (value != 64 && value != 128)
2494 {
2495 error ("Unknown switch -mlong-double-%s", arg);
2496 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2497 return false;
2498 }
2499 else
2500 rs6000_long_double_type_size = value;
2501 break;
2502
2503 case OPT_msched_costly_dep_:
2504 rs6000_sched_costly_dep_str = arg;
2505 break;
2506
2507 case OPT_malign_:
2508 rs6000_explicit_options.alignment = true;
2509 if (! strcmp (arg, "power"))
2510 {
2511 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2512 some C library functions, so warn about it. The flag may be
2513 useful for performance studies from time to time though, so
2514 don't disable it entirely. */
2515 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2516 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2517 " it is incompatible with the installed C and C++ libraries");
2518 rs6000_alignment_flags = MASK_ALIGN_POWER;
2519 }
2520 else if (! strcmp (arg, "natural"))
2521 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2522 else
2523 {
2524 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2525 return false;
2526 }
2527 break;
696e45ba
ME
2528
2529 case OPT_msingle_float:
2530 if (!TARGET_SINGLE_FPU)
2531 warning (0, "-msingle-float option equivalent to -mhard-float");
2532 /* -msingle-float implies -mno-double-float and TARGET_HARD_FLOAT. */
2533 rs6000_double_float = 0;
2534 target_flags &= ~MASK_SOFT_FLOAT;
2535 target_flags_explicit |= MASK_SOFT_FLOAT;
2536 break;
2537
2538 case OPT_mdouble_float:
2539 /* -mdouble-float implies -msingle-float and TARGET_HARD_FLOAT. */
2540 rs6000_single_float = 1;
2541 target_flags &= ~MASK_SOFT_FLOAT;
2542 target_flags_explicit |= MASK_SOFT_FLOAT;
2543 break;
2544
2545 case OPT_msimple_fpu:
2546 if (!TARGET_SINGLE_FPU)
2547 warning (0, "-msimple-fpu option ignored");
2548 break;
2549
2550 case OPT_mhard_float:
2551 /* -mhard-float implies -msingle-float and -mdouble-float. */
2552 rs6000_single_float = rs6000_double_float = 1;
2553 break;
2554
2555 case OPT_msoft_float:
2556 /* -msoft-float implies -mno-single-float and -mno-double-float. */
2557 rs6000_single_float = rs6000_double_float = 0;
2558 break;
0bb7b92e
ME
2559
2560 case OPT_mfpu_:
2561 fpu_type = rs6000_parse_fpu_option (arg);
2562 if (fpu_type != FPU_NONE)
2563 /* If -mfpu is not none, then turn off SOFT_FLOAT, turn on HARD_FLOAT. */
2564 {
2565 target_flags &= ~MASK_SOFT_FLOAT;
2566 target_flags_explicit |= MASK_SOFT_FLOAT;
2567 rs6000_xilinx_fpu = 1;
2568 if (fpu_type == FPU_SF_LITE || fpu_type == FPU_SF_FULL)
2569 rs6000_single_float = 1;
2570 if (fpu_type == FPU_DF_LITE || fpu_type == FPU_DF_FULL)
2571 rs6000_single_float = rs6000_double_float = 1;
2572 if (fpu_type == FPU_SF_LITE || fpu_type == FPU_DF_LITE)
2573 rs6000_simple_fpu = 1;
2574 }
2575 else
2576 {
2577 /* -mfpu=none is equivalent to -msoft-float */
2578 target_flags |= MASK_SOFT_FLOAT;
2579 target_flags_explicit |= MASK_SOFT_FLOAT;
2580 rs6000_single_float = rs6000_double_float = 0;
2581 }
2582 break;
78f5898b
AH
2583 }
2584 return true;
2585}
3cfa4909
MM
2586\f
2587/* Do anything needed at the start of the asm file. */
2588
1bc7c5b6 2589static void
863d938c 2590rs6000_file_start (void)
3cfa4909 2591{
c4d38ccb 2592 size_t i;
3cfa4909 2593 char buffer[80];
d330fd93 2594 const char *start = buffer;
3cfa4909 2595 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2596 const char *default_cpu = TARGET_CPU_DEFAULT;
2597 FILE *file = asm_out_file;
2598
2599 default_file_start ();
2600
2601#ifdef TARGET_BI_ARCH
2602 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2603 default_cpu = 0;
2604#endif
3cfa4909
MM
2605
2606 if (flag_verbose_asm)
2607 {
2608 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2609 rs6000_select[0].string = default_cpu;
2610
b6a1cbae 2611 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2612 {
2613 ptr = &rs6000_select[i];
2614 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2615 {
2616 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2617 start = "";
2618 }
2619 }
2620
9c6b4ed9 2621 if (PPC405_ERRATUM77)
b0bfee6e 2622 {
9c6b4ed9 2623 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2624 start = "";
2625 }
b0bfee6e 2626
b91da81f 2627#ifdef USING_ELFOS_H
3cfa4909
MM
2628 switch (rs6000_sdata)
2629 {
2630 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2631 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2632 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2633 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2634 }
2635
2636 if (rs6000_sdata && g_switch_value)
2637 {
307b599c
MK
2638 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2639 g_switch_value);
3cfa4909
MM
2640 start = "";
2641 }
2642#endif
2643
2644 if (*start == '\0')
949ea356 2645 putc ('\n', file);
3cfa4909 2646 }
b723e82f 2647
e51917ae
JM
2648#ifdef HAVE_AS_GNU_ATTRIBUTE
2649 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2650 {
2651 fprintf (file, "\t.gnu_attribute 4, %d\n",
696e45ba
ME
2652 ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT) ? 1
2653 : (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_SINGLE_FLOAT) ? 3
2654 : 2));
aaa42494
DJ
2655 fprintf (file, "\t.gnu_attribute 8, %d\n",
2656 (TARGET_ALTIVEC_ABI ? 2
2657 : TARGET_SPE_ABI ? 3
2658 : 1));
f9fd1e77
NF
2659 fprintf (file, "\t.gnu_attribute 12, %d\n",
2660 aix_struct_return ? 2 : 1);
2661
aaa42494 2662 }
e51917ae
JM
2663#endif
2664
b723e82f
JJ
2665 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2666 {
d6b5193b
RS
2667 switch_to_section (toc_section);
2668 switch_to_section (text_section);
b723e82f 2669 }
3cfa4909 2670}
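/* Editorial sketch of the output produced above (exact text depends on
   the target and options; not copied from a real run).  With
   -fverbose-asm on a 32-bit SVR4 hard-float configuration the assembly
   file might begin with something like

	 # rs6000/powerpc options: -mcpu=603e -msdata=data
	.gnu_attribute 4, 1
	.gnu_attribute 8, 1
	.gnu_attribute 12, 1

   where attribute 4 records the floating-point ABI, 8 the vector ABI and
   12 the struct-return convention.  */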
c4e18b1c 2671
5248c961 2672\f
a0ab749a 2673/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2674
2675int
863d938c 2676direct_return (void)
9878760c 2677{
4697a36c
MM
2678 if (reload_completed)
2679 {
2680 rs6000_stack_t *info = rs6000_stack_info ();
2681
2682 if (info->first_gp_reg_save == 32
2683 && info->first_fp_reg_save == 64
00b960c7 2684 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2685 && ! info->lr_save_p
2686 && ! info->cr_save_p
00b960c7 2687 && info->vrsave_mask == 0
c81fc13e 2688 && ! info->push_p)
4697a36c
MM
2689 return 1;
2690 }
2691
2692 return 0;
9878760c
RK
2693}
2694
4e74d8ec
MM
2695/* Return the number of instructions it takes to form a constant in an
2696 integer register. */
2697
48d72335 2698int
a2369ed3 2699num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2700{
2701 /* signed constant loadable with {cal|addi} */
547b216d 2702 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2703 return 1;
2704
4e74d8ec 2705 /* constant loadable with {cau|addis} */
547b216d
DE
2706 else if ((value & 0xffff) == 0
2707 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2708 return 1;
2709
5f59ecb7 2710#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2711 else if (TARGET_POWERPC64)
4e74d8ec 2712 {
a65c591c
DE
2713 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2714 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2715
a65c591c 2716 if (high == 0 || high == -1)
4e74d8ec
MM
2717 return 2;
2718
a65c591c 2719 high >>= 1;
4e74d8ec 2720
a65c591c 2721 if (low == 0)
4e74d8ec 2722 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2723 else
2724 return (num_insns_constant_wide (high)
e396202a 2725 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2726 }
2727#endif
2728
2729 else
2730 return 2;
2731}
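/* Editorial worked examples for num_insns_constant_wide (added for
   exposition, not original text):

     0x00007fff -> 1   (fits the signed 16-bit addi immediate)
     0x12340000 -> 1   (low half zero, a single addis/lis)
     0x12345678 -> 2   (lis 0x1234 then ori 0x5678)

   and, with TARGET_POWERPC64, 0x1234567800000000 -> 3: the low word is
   zero, so the cost is the two insns for the high word plus one shift to
   move it into place.  */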
2732
2733int
a2369ed3 2734num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2735{
37409796 2736 HOST_WIDE_INT low, high;
bb8df8a6 2737
37409796 2738 switch (GET_CODE (op))
0d30d435 2739 {
37409796 2740 case CONST_INT:
0d30d435 2741#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2742 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2743 && mask64_operand (op, mode))
c4ad648e 2744 return 2;
0d30d435
DE
2745 else
2746#endif
2747 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2748
37409796 2749 case CONST_DOUBLE:
e41b2a33 2750 if (mode == SFmode || mode == SDmode)
37409796
NS
2751 {
2752 long l;
2753 REAL_VALUE_TYPE rv;
bb8df8a6 2754
37409796 2755 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
e41b2a33
PB
2756 if (DECIMAL_FLOAT_MODE_P (mode))
2757 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
2758 else
2759 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
37409796
NS
2760 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2761 }
a260abc9 2762
37409796
NS
2763 if (mode == VOIDmode || mode == DImode)
2764 {
2765 high = CONST_DOUBLE_HIGH (op);
2766 low = CONST_DOUBLE_LOW (op);
2767 }
2768 else
2769 {
2770 long l[2];
2771 REAL_VALUE_TYPE rv;
bb8df8a6 2772
37409796 2773 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2774 if (DECIMAL_FLOAT_MODE_P (mode))
2775 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2776 else
2777 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2778 high = l[WORDS_BIG_ENDIAN == 0];
2779 low = l[WORDS_BIG_ENDIAN != 0];
2780 }
47ad8c61 2781
37409796
NS
2782 if (TARGET_32BIT)
2783 return (num_insns_constant_wide (low)
2784 + num_insns_constant_wide (high));
2785 else
2786 {
2787 if ((high == 0 && low >= 0)
2788 || (high == -1 && low < 0))
2789 return num_insns_constant_wide (low);
bb8df8a6 2790
1990cd79 2791 else if (mask64_operand (op, mode))
37409796 2792 return 2;
bb8df8a6 2793
37409796
NS
2794 else if (low == 0)
2795 return num_insns_constant_wide (high) + 1;
bb8df8a6 2796
37409796
NS
2797 else
2798 return (num_insns_constant_wide (high)
2799 + num_insns_constant_wide (low) + 1);
2800 }
bb8df8a6 2801
37409796
NS
2802 default:
2803 gcc_unreachable ();
4e74d8ec 2804 }
4e74d8ec
MM
2805}
2806
0972012c
RS
2807/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2808 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2809 corresponding element of the vector, but for V4SFmode and V2SFmode,
2810 the corresponding "float" is interpreted as an SImode integer. */
2811
847535b6 2812HOST_WIDE_INT
0972012c
RS
2813const_vector_elt_as_int (rtx op, unsigned int elt)
2814{
2815 rtx tmp = CONST_VECTOR_ELT (op, elt);
2816 if (GET_MODE (op) == V4SFmode
2817 || GET_MODE (op) == V2SFmode)
2818 tmp = gen_lowpart (SImode, tmp);
2819 return INTVAL (tmp);
2820}
452a7d36 2821
77ccdfed 2822/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2823 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2824 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2825 all items are set to the same value and contain COPIES replicas of the
2826 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2827 operand and the others are set to the value of the operand's msb. */
2828
2829static bool
2830vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2831{
66180ff3
PB
2832 enum machine_mode mode = GET_MODE (op);
2833 enum machine_mode inner = GET_MODE_INNER (mode);
2834
2835 unsigned i;
2836 unsigned nunits = GET_MODE_NUNITS (mode);
2837 unsigned bitsize = GET_MODE_BITSIZE (inner);
2838 unsigned mask = GET_MODE_MASK (inner);
2839
0972012c 2840 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2841 HOST_WIDE_INT splat_val = val;
2842 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2843
2844 /* Construct the value to be splatted, if possible. If not, return 0. */
2845 for (i = 2; i <= copies; i *= 2)
452a7d36 2846 {
66180ff3
PB
2847 HOST_WIDE_INT small_val;
2848 bitsize /= 2;
2849 small_val = splat_val >> bitsize;
2850 mask >>= bitsize;
2851 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2852 return false;
2853 splat_val = small_val;
2854 }
c4ad648e 2855
66180ff3
PB
2856 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2857 if (EASY_VECTOR_15 (splat_val))
2858 ;
2859
2860 /* Also check if we can splat, and then add the result to itself. Do so if
2861 the value is positive, or if the splat instruction is using OP's mode;
2862 for splat_val < 0, the splat and the add should use the same mode. */
2863 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2864 && (splat_val >= 0 || (step == 1 && copies == 1)))
2865 ;
2866
2867 else
2868 return false;
2869
2870 /* Check if VAL is present in every STEP-th element, and the
2871 other elements are filled with its most significant bit. */
2872 for (i = 0; i < nunits - 1; ++i)
2873 {
2874 HOST_WIDE_INT desired_val;
2875 if (((i + 1) & (step - 1)) == 0)
2876 desired_val = val;
2877 else
2878 desired_val = msb_val;
2879
0972012c 2880 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2881 return false;
452a7d36 2882 }
66180ff3
PB
2883
2884 return true;
452a7d36
HP
2885}
2886
69ef87e2 2887
77ccdfed 2888/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2889 with a vspltisb, vspltish or vspltisw. */
2890
2891bool
2892easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2893{
66180ff3 2894 unsigned step, copies;
d744e06e 2895
66180ff3
PB
2896 if (mode == VOIDmode)
2897 mode = GET_MODE (op);
2898 else if (mode != GET_MODE (op))
2899 return false;
d744e06e 2900
66180ff3
PB
2901 /* Start with a vspltisw. */
2902 step = GET_MODE_NUNITS (mode) / 4;
2903 copies = 1;
2904
2905 if (vspltis_constant (op, step, copies))
2906 return true;
2907
2908 /* Then try with a vspltish. */
2909 if (step == 1)
2910 copies <<= 1;
2911 else
2912 step >>= 1;
2913
2914 if (vspltis_constant (op, step, copies))
2915 return true;
2916
2917 /* And finally a vspltisb. */
2918 if (step == 1)
2919 copies <<= 1;
2920 else
2921 step >>= 1;
2922
2923 if (vspltis_constant (op, step, copies))
2924 return true;
2925
2926 return false;
d744e06e
AH
2927}
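/* Editorial walk-through of the vspltisw/vspltish/vspltisb search above
   (added for exposition).  For the V4SImode constant whose every element
   is 0x00050005:

     - step 1, copies 1 (vspltisw): 0x00050005 is neither EASY_VECTOR_15
       nor EASY_VECTOR_15_ADD_SELF, so vspltis_constant fails;
     - step 1, copies 2 (vspltish): the two 16-bit halves agree, so the
       splat value collapses to 5, which is EASY_VECTOR_15 and matches
       every element -- the constant can be built with "vspltish %0,5".

   A V16QImode vector of sixteen 3s similarly falls through to the final
   vspltisb case.  */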
2928
66180ff3
PB
2929/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2930 result is OP. Abort if it is not possible. */
d744e06e 2931
f676971a 2932rtx
66180ff3 2933gen_easy_altivec_constant (rtx op)
452a7d36 2934{
66180ff3
PB
2935 enum machine_mode mode = GET_MODE (op);
2936 int nunits = GET_MODE_NUNITS (mode);
2937 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2938 unsigned step = nunits / 4;
2939 unsigned copies = 1;
2940
2941 /* Start with a vspltisw. */
2942 if (vspltis_constant (op, step, copies))
2943 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2944
2945 /* Then try with a vspltish. */
2946 if (step == 1)
2947 copies <<= 1;
2948 else
2949 step >>= 1;
2950
2951 if (vspltis_constant (op, step, copies))
2952 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2953
2954 /* And finally a vspltisb. */
2955 if (step == 1)
2956 copies <<= 1;
2957 else
2958 step >>= 1;
2959
2960 if (vspltis_constant (op, step, copies))
2961 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2962
2963 gcc_unreachable ();
d744e06e
AH
2964}
2965
2966const char *
a2369ed3 2967output_vec_const_move (rtx *operands)
d744e06e
AH
2968{
2969 int cst, cst2;
2970 enum machine_mode mode;
2971 rtx dest, vec;
2972
2973 dest = operands[0];
2974 vec = operands[1];
d744e06e 2975 mode = GET_MODE (dest);
69ef87e2 2976
d744e06e
AH
2977 if (TARGET_ALTIVEC)
2978 {
66180ff3 2979 rtx splat_vec;
d744e06e
AH
2980 if (zero_constant (vec, mode))
2981 return "vxor %0,%0,%0";
37409796 2982
66180ff3
PB
2983 splat_vec = gen_easy_altivec_constant (vec);
2984 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2985 operands[1] = XEXP (splat_vec, 0);
2986 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2987 return "#";
bb8df8a6 2988
66180ff3 2989 switch (GET_MODE (splat_vec))
98ef3137 2990 {
37409796 2991 case V4SImode:
66180ff3 2992 return "vspltisw %0,%1";
c4ad648e 2993
37409796 2994 case V8HImode:
66180ff3 2995 return "vspltish %0,%1";
c4ad648e 2996
37409796 2997 case V16QImode:
66180ff3 2998 return "vspltisb %0,%1";
bb8df8a6 2999
37409796
NS
3000 default:
3001 gcc_unreachable ();
98ef3137 3002 }
69ef87e2
AH
3003 }
3004
37409796 3005 gcc_assert (TARGET_SPE);
bb8df8a6 3006
37409796
NS
3007 /* Vector constant 0 is handled as a splitter of V2SI, and in the
3008 pattern of V1DI, V4HI, and V2SF.
3009
3010 FIXME: We should probably return # and add post reload
3011 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
3012 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
3013 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
3014 operands[1] = CONST_VECTOR_ELT (vec, 0);
3015 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
3016 if (cst == cst2)
3017 return "li %0,%1\n\tevmergelo %0,%0,%0";
3018 else
3019 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
3020}
3021
f5027409
RE
3022/* Initialize the paired-single vector TARGET to VALS. */
3023
3024void
3025paired_expand_vector_init (rtx target, rtx vals)
3026{
3027 enum machine_mode mode = GET_MODE (target);
3028 int n_elts = GET_MODE_NUNITS (mode);
3029 int n_var = 0;
0a2aaacc 3030 rtx x, new_rtx, tmp, constant_op, op1, op2;
f5027409
RE
3031 int i;
3032
3033 for (i = 0; i < n_elts; ++i)
3034 {
3035 x = XVECEXP (vals, 0, i);
3036 if (!CONSTANT_P (x))
3037 ++n_var;
3038 }
3039 if (n_var == 0)
3040 {
3041 /* Load from constant pool. */
3042 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
3043 return;
3044 }
3045
3046 if (n_var == 2)
3047 {
3048 /* The vector is initialized only with non-constants. */
0a2aaacc 3049 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
f5027409
RE
3050 XVECEXP (vals, 0, 1));
3051
0a2aaacc 3052 emit_move_insn (target, new_rtx);
f5027409
RE
3053 return;
3054 }
3055
3056 /* One field is non-constant and the other one is a constant. Load the
3057 constant from the constant pool and use ps_merge instruction to
3058 construct the whole vector. */
3059 op1 = XVECEXP (vals, 0, 0);
3060 op2 = XVECEXP (vals, 0, 1);
3061
3062 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
3063
3064 tmp = gen_reg_rtx (GET_MODE (constant_op));
3065 emit_move_insn (tmp, constant_op);
3066
3067 if (CONSTANT_P (op1))
0a2aaacc 3068 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
f5027409 3069 else
0a2aaacc 3070 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
f5027409 3071
0a2aaacc 3072 emit_move_insn (target, new_rtx);
f5027409
RE
3073}
3074
e2e95f45
RE
3075void
3076paired_expand_vector_move (rtx operands[])
3077{
3078 rtx op0 = operands[0], op1 = operands[1];
3079
3080 emit_move_insn (op0, op1);
3081}
3082
3083/* Emit vector compare for code RCODE. DEST is destination, OP1 and
3084 OP2 are two VEC_COND_EXPR operands, CC_OP0 and CC_OP1 are the two
3085 operands for the relation operation COND. This is a recursive
3086 function. */
3087
3088static void
3089paired_emit_vector_compare (enum rtx_code rcode,
3090 rtx dest, rtx op0, rtx op1,
3091 rtx cc_op0, rtx cc_op1)
3092{
3093 rtx tmp = gen_reg_rtx (V2SFmode);
3094 rtx tmp1, max, min, equal_zero;
3095
3096 gcc_assert (TARGET_PAIRED_FLOAT);
3097 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
3098
3099 switch (rcode)
3100 {
3101 case LT:
3102 case LTU:
3103 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
3104 return;
3105 case GE:
3106 case GEU:
3107 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
3108 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
3109 return;
3110 case LE:
3111 case LEU:
3112 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
3113 return;
3114 case GT:
3115 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
3116 return;
3117 case EQ:
3118 tmp1 = gen_reg_rtx (V2SFmode);
3119 max = gen_reg_rtx (V2SFmode);
3120 min = gen_reg_rtx (V2SFmode);
3121 equal_zero = gen_reg_rtx (V2SFmode);
3122
3123 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
3124 emit_insn (gen_selv2sf4
3125 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
3126 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
3127 emit_insn (gen_selv2sf4
3128 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
3129 emit_insn (gen_subv2sf3 (tmp1, min, max));
3130 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
3131 return;
3132 case NE:
3133 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
3134 return;
3135 case UNLE:
3136 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
3137 return;
3138 case UNLT:
3139 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
3140 return;
3141 case UNGE:
3142 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
3143 return;
3144 case UNGT:
3145 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
3146 return;
3147 default:
3148 gcc_unreachable ();
3149 }
3150
3151 return;
3152}
3153
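/* Worked example (added for exposition): every code handled above is
   funnelled into the GE form, which maps directly onto subv2sf3 followed
   by selv2sf4.  For instance LT re-issues GE with the two value arms
   OP0/OP1 swapped; GT swaps the arms and goes through LE, which in turn
   swaps the comparison operands; and EQ selects on
   min (a, b) - max (a, b), which is >= 0 exactly when the elements are
   equal.  */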
3154/* Emit vector conditional expression.
3155 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
3156 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
3157
3158int
3159paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
3160 rtx cond, rtx cc_op0, rtx cc_op1)
3161{
3162 enum rtx_code rcode = GET_CODE (cond);
3163
3164 if (!TARGET_PAIRED_FLOAT)
3165 return 0;
3166
3167 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
3168
3169 return 1;
3170}
3171
7a4eca66
DE
3172/* Initialize vector TARGET to VALS. */
3173
3174void
3175rs6000_expand_vector_init (rtx target, rtx vals)
3176{
3177 enum machine_mode mode = GET_MODE (target);
3178 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3179 int n_elts = GET_MODE_NUNITS (mode);
3180 int n_var = 0, one_var = -1;
3181 bool all_same = true, all_const_zero = true;
3182 rtx x, mem;
3183 int i;
3184
3185 for (i = 0; i < n_elts; ++i)
3186 {
3187 x = XVECEXP (vals, 0, i);
3188 if (!CONSTANT_P (x))
3189 ++n_var, one_var = i;
3190 else if (x != CONST0_RTX (inner_mode))
3191 all_const_zero = false;
3192
3193 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
3194 all_same = false;
3195 }
3196
3197 if (n_var == 0)
3198 {
501fb355 3199 rtx const_vec = gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0));
7a4eca66
DE
3200 if (mode != V4SFmode && all_const_zero)
3201 {
3202 /* Zero register. */
3203 emit_insn (gen_rtx_SET (VOIDmode, target,
3204 gen_rtx_XOR (mode, target, target)));
3205 return;
3206 }
501fb355 3207 else if (mode != V4SFmode && easy_vector_constant (const_vec, mode))
7a4eca66
DE
3208 {
3209 /* Splat immediate. */
501fb355 3210 emit_insn (gen_rtx_SET (VOIDmode, target, const_vec));
7a4eca66
DE
3211 return;
3212 }
3213 else if (all_same)
3214 ; /* Splat vector element. */
3215 else
3216 {
3217 /* Load from constant pool. */
501fb355 3218 emit_move_insn (target, const_vec);
7a4eca66
DE
3219 return;
3220 }
3221 }
3222
3223 /* Store value to stack temp. Load vector element. Splat. */
3224 if (all_same)
3225 {
3226 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3227 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
3228 XVECEXP (vals, 0, 0));
3229 x = gen_rtx_UNSPEC (VOIDmode,
3230 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3231 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3232 gen_rtvec (2,
3233 gen_rtx_SET (VOIDmode,
3234 target, mem),
3235 x)));
3236 x = gen_rtx_VEC_SELECT (inner_mode, target,
3237 gen_rtx_PARALLEL (VOIDmode,
3238 gen_rtvec (1, const0_rtx)));
3239 emit_insn (gen_rtx_SET (VOIDmode, target,
3240 gen_rtx_VEC_DUPLICATE (mode, x)));
3241 return;
3242 }
3243
3244 /* One field is non-constant. Load constant then overwrite
3245 varying field. */
3246 if (n_var == 1)
3247 {
3248 rtx copy = copy_rtx (vals);
3249
57b51d4d 3250 /* Load constant part of vector, substitute neighboring value for
3251 varying element. */
3252 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
3253 rs6000_expand_vector_init (target, copy);
3254
3255 /* Insert variable. */
3256 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
3257 return;
3258 }
3259
3260 /* Construct the vector in memory one field at a time
3261 and load the whole vector. */
3262 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3263 for (i = 0; i < n_elts; i++)
3264 emit_move_insn (adjust_address_nv (mem, inner_mode,
3265 i * GET_MODE_SIZE (inner_mode)),
3266 XVECEXP (vals, 0, i));
3267 emit_move_insn (target, mem);
3268}
3269
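/* Worked example (added for exposition): for an initializer such as
   { x, 0, 0, 0 } with X in a register, the n_var == 1 path above first
   materializes the all-constant vector { 0, 0, 0, 0 } (the varying slot
   is temporarily filled with its neighbour) and then calls
   rs6000_expand_vector_set to insert X into element 0.  Initializers
   with two or more varying, non-identical elements fall through to the
   build-in-memory loop at the end.  */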
3270/* Set field ELT of TARGET to VAL. */
3271
3272void
3273rs6000_expand_vector_set (rtx target, rtx val, int elt)
3274{
3275 enum machine_mode mode = GET_MODE (target);
3276 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3277 rtx reg = gen_reg_rtx (mode);
3278 rtx mask, mem, x;
3279 int width = GET_MODE_SIZE (inner_mode);
3280 int i;
3281
3282 /* Load single variable value. */
3283 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3284 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
3285 x = gen_rtx_UNSPEC (VOIDmode,
3286 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3287 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3288 gen_rtvec (2,
3289 gen_rtx_SET (VOIDmode,
3290 reg, mem),
3291 x)));
3292
3293 /* Linear sequence. */
3294 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
3295 for (i = 0; i < 16; ++i)
3296 XVECEXP (mask, 0, i) = GEN_INT (i);
3297
3298 /* Set permute mask to insert element into target. */
3299 for (i = 0; i < width; ++i)
3300 XVECEXP (mask, 0, elt*width + i)
3301 = GEN_INT (i + 0x10);
3302 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3303 x = gen_rtx_UNSPEC (mode,
3304 gen_rtvec (3, target, reg,
3305 force_reg (V16QImode, x)),
3306 UNSPEC_VPERM);
3307 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3308}
3309
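/* Illustrative sketch (added for exposition; the helper name is
   hypothetical and not part of GCC): the mask built above is an identity
   byte permute whose bytes for the inserted element are redirected to
   the second vperm input (offset 0x10).  For a V4SI target with ELT == 2
   and WIDTH == 4 the control vector becomes
   { 0,1,2,3, 4,5,6,7, 16,17,18,19, 12,13,14,15 }.  */
static void
build_insert_permute_mask (unsigned char mask[16], int elt, int width)
{
  int i;

  for (i = 0; i < 16; ++i)
    mask[i] = i;			/* identity: keep the target's bytes */
  for (i = 0; i < width; ++i)
    mask[elt * width + i] = i + 0x10;	/* element ELT comes from input 2 */
}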
3310/* Extract field ELT from VEC into TARGET. */
3311
3312void
3313rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3314{
3315 enum machine_mode mode = GET_MODE (vec);
3316 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3317 rtx mem, x;
3318
3319 /* Allocate mode-sized buffer. */
3320 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3321
3322 /* Add offset to field within buffer matching vector element. */
3323 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3324
3325 /* Store single field into mode-sized buffer. */
3326 x = gen_rtx_UNSPEC (VOIDmode,
3327 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3328 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3329 gen_rtvec (2,
3330 gen_rtx_SET (VOIDmode,
3331 mem, vec),
3332 x)));
3333 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3334}
3335
0ba1b2ff
AM
3336/* Generates shifts and masks for a pair of rldicl or rldicr insns to
3337 implement ANDing by the mask IN. */
3338void
a2369ed3 3339build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
3340{
3341#if HOST_BITS_PER_WIDE_INT >= 64
3342 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3343 int shift;
3344
37409796 3345 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
3346
3347 c = INTVAL (in);
3348 if (c & 1)
3349 {
3350 /* Assume c initially something like 0x00fff000000fffff. The idea
3351 is to rotate the word so that the middle ^^^^^^ group of zeros
3352 is at the MS end and can be cleared with an rldicl mask. We then
3353 rotate back and clear off the MS ^^ group of zeros with a
3354 second rldicl. */
3355 c = ~c; /* c == 0xff000ffffff00000 */
3356 lsb = c & -c; /* lsb == 0x0000000000100000 */
3357 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3358 c = ~c; /* c == 0x00fff000000fffff */
3359 c &= -lsb; /* c == 0x00fff00000000000 */
3360 lsb = c & -c; /* lsb == 0x0000100000000000 */
3361 c = ~c; /* c == 0xff000fffffffffff */
3362 c &= -lsb; /* c == 0xff00000000000000 */
3363 shift = 0;
3364 while ((lsb >>= 1) != 0)
3365 shift++; /* shift == 44 on exit from loop */
3366 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3367 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3368 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
3369 }
3370 else
0ba1b2ff
AM
3371 {
3372 /* Assume c initially something like 0xff000f0000000000. The idea
3373 is to rotate the word so that the ^^^ middle group of zeros
3374 is at the LS end and can be cleared with an rldicr mask. We then
3375 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3376 a second rldicr. */
3377 lsb = c & -c; /* lsb == 0x0000010000000000 */
3378 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3379 c = ~c; /* c == 0x00fff0ffffffffff */
3380 c &= -lsb; /* c == 0x00fff00000000000 */
3381 lsb = c & -c; /* lsb == 0x0000100000000000 */
3382 c = ~c; /* c == 0xff000fffffffffff */
3383 c &= -lsb; /* c == 0xff00000000000000 */
3384 shift = 0;
3385 while ((lsb >>= 1) != 0)
3386 shift++; /* shift == 44 on exit from loop */
3387 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3388 m1 >>= shift; /* m1 == 0x0000000000000fff */
3389 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3390 }
3391
3392 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3393 masks will be all 1's. We are guaranteed more than one transition. */
3394 out[0] = GEN_INT (64 - shift);
3395 out[1] = GEN_INT (m1);
3396 out[2] = GEN_INT (shift);
3397 out[3] = GEN_INT (m2);
3398#else
045572c7
GK
3399 (void)in;
3400 (void)out;
37409796 3401 gcc_unreachable ();
0ba1b2ff 3402#endif
a260abc9
DE
3403}
3404
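/* Self-contained sketch (added for exposition; names and the use of
   unsigned long long are not part of GCC): assuming the natural reading
   of OUT -- rotate left by INTVAL (out[0]), AND with INTVAL (out[1]),
   rotate left by INTVAL (out[2]), AND with INTVAL (out[3]) -- the two
   steps compose to an AND with the original mask.  The check below uses
   the worked constants from the comments above (mask 0x00fff000000fffff,
   m1 0x000000ffffffffff, m2 0x00ffffffffffffff, shift 44).  */
static unsigned long long
rotl64 (unsigned long long x, unsigned int n)
{
  return n == 0 ? x : (x << n) | (x >> (64 - n));
}

/* Nonzero iff the two rotate-and-mask steps match X & C for the worked
   constants.  */
static int
mask64_2_check (unsigned long long x)
{
  const unsigned long long c = 0x00fff000000fffffULL;
  const unsigned long long m1 = 0x000000ffffffffffULL;
  const unsigned long long m2 = 0x00ffffffffffffffULL;
  const unsigned int shift = 44;

  return (rotl64 (rotl64 (x, 64 - shift) & m1, shift) & m2) == (x & c);
}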
54b695e7 3405/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
3406
3407bool
54b695e7
AH
3408invalid_e500_subreg (rtx op, enum machine_mode mode)
3409{
61c76239
JM
3410 if (TARGET_E500_DOUBLE)
3411 {
17caeff2 3412 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
4f011e1e
JM
3413 subreg:TI and reg:TF. Decimal float modes are like integer
3414 modes (only low part of each register used) for this
3415 purpose. */
61c76239 3416 if (GET_CODE (op) == SUBREG
4f011e1e
JM
3417 && (mode == SImode || mode == DImode || mode == TImode
3418 || mode == DDmode || mode == TDmode)
61c76239 3419 && REG_P (SUBREG_REG (op))
17caeff2 3420 && (GET_MODE (SUBREG_REG (op)) == DFmode
4f011e1e 3421 || GET_MODE (SUBREG_REG (op)) == TFmode))
61c76239
JM
3422 return true;
3423
17caeff2
JM
3424 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3425 reg:TI. */
61c76239 3426 if (GET_CODE (op) == SUBREG
4f011e1e 3427 && (mode == DFmode || mode == TFmode)
61c76239 3428 && REG_P (SUBREG_REG (op))
17caeff2 3429 && (GET_MODE (SUBREG_REG (op)) == DImode
4f011e1e
JM
3430 || GET_MODE (SUBREG_REG (op)) == TImode
3431 || GET_MODE (SUBREG_REG (op)) == DDmode
3432 || GET_MODE (SUBREG_REG (op)) == TDmode))
61c76239
JM
3433 return true;
3434 }
54b695e7 3435
61c76239
JM
3436 if (TARGET_SPE
3437 && GET_CODE (op) == SUBREG
3438 && mode == SImode
54b695e7 3439 && REG_P (SUBREG_REG (op))
14502dad 3440 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
3441 return true;
3442
3443 return false;
3444}
3445
58182de3 3446/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
3447 field is an FP double while the FP fields remain word aligned. */
3448
19d66194 3449unsigned int
fa5b0972
AM
3450rs6000_special_round_type_align (tree type, unsigned int computed,
3451 unsigned int specified)
95727fb8 3452{
fa5b0972 3453 unsigned int align = MAX (computed, specified);
95727fb8 3454 tree field = TYPE_FIELDS (type);
95727fb8 3455
bb8df8a6 3456 /* Skip all non-field decls.  */
85962ac8 3457 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
3458 field = TREE_CHAIN (field);
3459
fa5b0972
AM
3460 if (field != NULL && field != type)
3461 {
3462 type = TREE_TYPE (field);
3463 while (TREE_CODE (type) == ARRAY_TYPE)
3464 type = TREE_TYPE (type);
3465
3466 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3467 align = MAX (align, 64);
3468 }
95727fb8 3469
fa5b0972 3470 return align;
3471}
3472
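/* Illustrative example (added for exposition): under the rule above, a
   record such as

     struct s { double d; int i; };

   is rounded up to 64-bit alignment on AIX even though the double field
   itself stays word-aligned inside the record; an array of doubles as
   the first field is treated the same way because the loop strips
   ARRAY_TYPE before the DFmode check.  */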
58182de3
GK
3473/* Darwin increases record alignment to the natural alignment of
3474 the first field. */
3475
3476unsigned int
3477darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3478 unsigned int specified)
3479{
3480 unsigned int align = MAX (computed, specified);
3481
3482 if (TYPE_PACKED (type))
3483 return align;
3484
3485 /* Find the first field, looking down into aggregates. */
3486 do {
3487 tree field = TYPE_FIELDS (type);
3488 /* Skip all non-field decls.  */
3489 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3490 field = TREE_CHAIN (field);
3491 if (! field)
3492 break;
3493 type = TREE_TYPE (field);
3494 while (TREE_CODE (type) == ARRAY_TYPE)
3495 type = TREE_TYPE (type);
3496 } while (AGGREGATE_TYPE_P (type));
3497
3498 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3499 align = MAX (align, TYPE_ALIGN (type));
3500
3501 return align;
3502}
3503
a4f6c312 3504/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3505
3506int
f676971a 3507small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3508 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3509{
38c1f2d7 3510#if TARGET_ELF
5f59ecb7 3511 rtx sym_ref;
7509c759 3512
d9407988 3513 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3514 return 0;
a54d04b7 3515
f607bc57 3516 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3517 return 0;
3518
2aa42e6e
NF
3519 /* Vector and float memory instructions have a limited offset on the
3520 SPE, so using a vector or float variable directly as an operand is
3521 not useful. */
3522 if (TARGET_SPE
3523 && (SPE_VECTOR_MODE (mode) || FLOAT_MODE_P (mode)))
3524 return 0;
3525
88228c4b
MM
3526 if (GET_CODE (op) == SYMBOL_REF)
3527 sym_ref = op;
3528
3529 else if (GET_CODE (op) != CONST
3530 || GET_CODE (XEXP (op, 0)) != PLUS
3531 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3532 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3533 return 0;
3534
88228c4b 3535 else
dbf55e53
MM
3536 {
3537 rtx sum = XEXP (op, 0);
3538 HOST_WIDE_INT summand;
3539
3540 /* We have to be careful here, because it is the referenced address
c4ad648e 3541 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3542 summand = INTVAL (XEXP (sum, 1));
307b599c 3543 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3544 return 0;
dbf55e53
MM
3545
3546 sym_ref = XEXP (sum, 0);
3547 }
88228c4b 3548
20bfcd69 3549 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3550#else
3551 return 0;
3552#endif
7509c759 3553}
46c07df8 3554
3a1f863f 3555/* Return true if either operand is a general purpose register. */
46c07df8 3556
3a1f863f
DE
3557bool
3558gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3559{
3a1f863f
DE
3560 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3561 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3562}
3563
9ebbca7d 3564\f
c6c3dba9 3565/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address_p. */
4d588c14 3566
4d588c14 3567static bool
a2369ed3 3568constant_pool_expr_p (rtx op)
9ebbca7d 3569{
2e4316da
RS
3570 rtx base, offset;
3571
3572 split_const (op, &base, &offset);
3573 return (GET_CODE (base) == SYMBOL_REF
3574 && CONSTANT_POOL_ADDRESS_P (base)
3575 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (base), Pmode));
9ebbca7d
GK
3576}
3577
48d72335 3578bool
a2369ed3 3579toc_relative_expr_p (rtx op)
9ebbca7d 3580{
2e4316da
RS
3581 rtx base, offset;
3582
3583 if (GET_CODE (op) != CONST)
3584 return false;
3585
3586 split_const (op, &base, &offset);
3587 return (GET_CODE (base) == UNSPEC
3588 && XINT (base, 1) == UNSPEC_TOCREL);
4d588c14
RH
3589}
3590
4d588c14 3591bool
a2369ed3 3592legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3593{
3594 return (TARGET_TOC
3595 && GET_CODE (x) == PLUS
3596 && GET_CODE (XEXP (x, 0)) == REG
3597 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2e4316da 3598 && toc_relative_expr_p (XEXP (x, 1)));
4d588c14
RH
3599}
3600
d04b6e6e
EB
3601static bool
3602legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3603{
3604 return (DEFAULT_ABI == ABI_V4
3605 && !flag_pic && !TARGET_TOC
3606 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3607 && small_data_operand (x, mode));
3608}
3609
60cdabab
DE
3610/* SPE offset addressing is limited to 5-bits worth of double words. */
3611#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
3612
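/* Illustrative check (added for exposition; the helper name is
   hypothetical): the mask above leaves only bits 3..7 of the offset
   free, so the accepted offsets are exactly the doubleword multiples
   0, 8, 16, ..., 248.  */
static int
spe_const_offset_ok_equiv (int x)
{
  /* Same predicate as SPE_CONST_OFFSET_OK, spelled out.  */
  return x >= 0 && x <= 248 && (x & 7) == 0;
}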
76d2b81d
DJ
3613bool
3614rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3615{
3616 unsigned HOST_WIDE_INT offset, extra;
3617
3618 if (GET_CODE (x) != PLUS)
3619 return false;
3620 if (GET_CODE (XEXP (x, 0)) != REG)
3621 return false;
3622 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3623 return false;
60cdabab
DE
3624 if (legitimate_constant_pool_address_p (x))
3625 return true;
4d588c14
RH
3626 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3627 return false;
3628
3629 offset = INTVAL (XEXP (x, 1));
3630 extra = 0;
3631 switch (mode)
3632 {
3633 case V16QImode:
3634 case V8HImode:
3635 case V4SFmode:
3636 case V4SImode:
7a4eca66 3637 /* AltiVec vector modes. Only reg+reg addressing is valid and
1a23970d
DE
3638 constant offset zero should not occur due to canonicalization. */
3639 return false;
4d588c14
RH
3640
3641 case V4HImode:
3642 case V2SImode:
3643 case V1DImode:
3644 case V2SFmode:
d42a3bae 3645 /* Paired vector modes. Only reg+reg addressing is valid and
1a23970d 3646 constant offset zero should not occur due to canonicalization. */
d42a3bae 3647 if (TARGET_PAIRED_FLOAT)
1a23970d 3648 return false;
4d588c14
RH
3649 /* SPE vector modes. */
3650 return SPE_CONST_OFFSET_OK (offset);
3651
3652 case DFmode:
4d4cbc0e
AH
3653 if (TARGET_E500_DOUBLE)
3654 return SPE_CONST_OFFSET_OK (offset);
3655
4f011e1e 3656 case DDmode:
4d588c14 3657 case DImode:
54b695e7
AH
3658 /* On e500v2, we may have:
3659
3660 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3661
3662 Which gets addressed with evldd instructions. */
3663 if (TARGET_E500_DOUBLE)
3664 return SPE_CONST_OFFSET_OK (offset);
3665
7393f7f8 3666 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3667 extra = 4;
3668 else if (offset & 3)
3669 return false;
3670 break;
3671
3672 case TFmode:
17caeff2
JM
3673 if (TARGET_E500_DOUBLE)
3674 return (SPE_CONST_OFFSET_OK (offset)
3675 && SPE_CONST_OFFSET_OK (offset + 8));
3676
4f011e1e 3677 case TDmode:
4d588c14 3678 case TImode:
7393f7f8 3679 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3680 extra = 12;
3681 else if (offset & 3)
3682 return false;
3683 else
3684 extra = 8;
3685 break;
3686
3687 default:
3688 break;
3689 }
3690
b1917422
AM
3691 offset += 0x8000;
3692 return (offset < 0x10000) && (offset + extra < 0x10000);
3693}
3694
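/* Worked note (added for exposition; the helper name is hypothetical):
   OFFSET is unsigned above, so adding 0x8000 and comparing against
   0x10000 is a biased test for the signed 16-bit displacement range,
   and the EXTRA term additionally requires that the offset of the last
   part of a multi-word access still fits.  With explicit bounds:  */
static int
offset_fits_16bit_displacement (long long offset, unsigned int extra)
{
  return offset >= -0x8000
	 && offset <= 0x7fff
	 && offset + extra <= 0x7fff;
}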
6fb5fa3c 3695bool
a2369ed3 3696legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3697{
3698 rtx op0, op1;
3699
3700 if (GET_CODE (x) != PLUS)
3701 return false;
850e8d3d 3702
4d588c14
RH
3703 op0 = XEXP (x, 0);
3704 op1 = XEXP (x, 1);
3705
bf00cc0f 3706 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3707 replaced with proper base and index regs. */
3708 if (!strict
3709 && reload_in_progress
3710 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3711 && REG_P (op1))
3712 return true;
3713
3714 return (REG_P (op0) && REG_P (op1)
3715 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3716 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3717 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3718 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3719}
3720
001b9eb6
PH
3721bool
3722avoiding_indexed_address_p (enum machine_mode mode)
3723{
3724 /* Avoid indexed addressing for modes that have non-indexed
3725 load/store instruction forms. */
3726 return TARGET_AVOID_XFORM && !ALTIVEC_VECTOR_MODE (mode);
3727}
3728
48d72335 3729inline bool
a2369ed3 3730legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3731{
3732 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3733}
3734
48d72335 3735bool
4c81e946
FJ
3736macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3737{
c4ad648e 3738 if (!TARGET_MACHO || !flag_pic
9390387d 3739 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3740 return false;
3741 x = XEXP (x, 0);
4c81e946
FJ
3742
3743 if (GET_CODE (x) != LO_SUM)
3744 return false;
3745 if (GET_CODE (XEXP (x, 0)) != REG)
3746 return false;
3747 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3748 return false;
3749 x = XEXP (x, 1);
3750
3751 return CONSTANT_P (x);
3752}
3753
4d588c14 3754static bool
a2369ed3 3755legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3756{
3757 if (GET_CODE (x) != LO_SUM)
3758 return false;
3759 if (GET_CODE (XEXP (x, 0)) != REG)
3760 return false;
3761 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3762 return false;
54b695e7 3763 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2 3764 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 3765 || mode == DDmode || mode == TDmode
17caeff2 3766 || mode == DImode))
f82f556d 3767 return false;
4d588c14
RH
3768 x = XEXP (x, 1);
3769
8622e235 3770 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3771 {
a29077da 3772 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3773 return false;
3774 if (TARGET_TOC)
3775 return false;
3776 if (GET_MODE_NUNITS (mode) != 1)
3777 return false;
5e5f01b9 3778 if (GET_MODE_BITSIZE (mode) > 64
3c028f65 3779 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
696e45ba 3780 && !(TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
4d4447b5 3781 && (mode == DFmode || mode == DDmode))))
4d588c14
RH
3782 return false;
3783
3784 return CONSTANT_P (x);
3785 }
3786
3787 return false;
3788}
3789
3790
9ebbca7d
GK
3791/* Try machine-dependent ways of modifying an illegitimate address
3792 to be legitimate. If we find one, return the new, valid address.
3793 This is used from only one place: `memory_address' in explow.c.
3794
a4f6c312
SS
3795 OLDX is the address as it was before break_out_memory_refs was
3796 called. In some cases it is useful to look at this to decide what
3797 needs to be done.
9ebbca7d 3798
a4f6c312
SS
3799 It is always safe for this function to do nothing. It exists to
3800 recognize opportunities to optimize the output.
9ebbca7d
GK
3801
3802 On RS/6000, first check for the sum of a register with a constant
3803 integer that is out of range. If so, generate code to add the
3804 constant with the low-order 16 bits masked to the register and force
3805 this result into another register (this can be done with `cau').
3806 Then generate an address of REG+(CONST&0xffff), allowing for the
3807 possibility of bit 16 being a one.
3808
3809 Then check for the sum of a register and something not constant, try to
3810 load the other things into a register and return the sum. */
4d588c14 3811
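/* Worked example (added for exposition; the helper name is
   hypothetical): the first case below splits the constant into a
   sign-extended low 16-bit part and a high part that is a multiple of
   0x10000; for 0x12348900 this gives low = -0x7700 and
   high = 0x12350000, so "bit 16 being a one" is absorbed by letting the
   low part go negative.  */
static void
split_large_offset (long long val, long long *high, long long *low)
{
  *low = ((val & 0xffff) ^ 0x8000) - 0x8000;	/* sign-extend low 16 bits */
  *high = val - *low;				/* multiple of 0x10000 */
}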
9ebbca7d 3812rtx
a2369ed3
DJ
3813rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3814 enum machine_mode mode)
0ac081f6 3815{
c4501e62
JJ
3816 if (GET_CODE (x) == SYMBOL_REF)
3817 {
3818 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3819 if (model != 0)
3820 return rs6000_legitimize_tls_address (x, model);
3821 }
3822
f676971a 3823 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3824 && GET_CODE (XEXP (x, 0)) == REG
3825 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb 3826 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
7da13f1d
NF
3827 && !((TARGET_POWERPC64
3828 && (mode == DImode || mode == TImode)
3829 && (INTVAL (XEXP (x, 1)) & 3) != 0)
3830 || SPE_VECTOR_MODE (mode)
efc05e3c 3831 || ALTIVEC_VECTOR_MODE (mode)
3c1eb9eb 3832 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4f011e1e
JM
3833 || mode == DImode || mode == DDmode
3834 || mode == TDmode))))
f676971a 3835 {
9ebbca7d
GK
3836 HOST_WIDE_INT high_int, low_int;
3837 rtx sum;
a65c591c
DE
3838 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3839 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3840 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3841 GEN_INT (high_int)), 0);
3842 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3843 }
f676971a 3844 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3845 && GET_CODE (XEXP (x, 0)) == REG
3846 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3847 && GET_MODE_NUNITS (mode) == 1
696e45ba 3848 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
a3170dc6 3849 || TARGET_POWERPC64
efc05e3c 3850 || ((mode != DImode && mode != DFmode && mode != DDmode)
4f011e1e 3851 || (TARGET_E500_DOUBLE && mode != DDmode)))
9ebbca7d 3852 && (TARGET_POWERPC64 || mode != DImode)
001b9eb6 3853 && !avoiding_indexed_address_p (mode)
efc05e3c
PB
3854 && mode != TImode
3855 && mode != TFmode
3856 && mode != TDmode)
9ebbca7d
GK
3857 {
3858 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3859 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3860 }
0ac081f6
AH
3861 else if (ALTIVEC_VECTOR_MODE (mode))
3862 {
3863 rtx reg;
3864
3865 /* Make sure both operands are registers. */
3866 if (GET_CODE (x) == PLUS)
9f85ed45 3867 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3868 force_reg (Pmode, XEXP (x, 1)));
3869
3870 reg = force_reg (Pmode, x);
3871 return reg;
3872 }
4d4cbc0e 3873 else if (SPE_VECTOR_MODE (mode)
17caeff2 3874 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3875 || mode == DDmode || mode == TDmode
54b695e7 3876 || mode == DImode)))
a3170dc6 3877 {
54b695e7 3878 if (mode == DImode)
506d7b68 3879 return x;
a3170dc6
AH
3880 /* We accept [reg + reg] and [reg + OFFSET]. */
3881
3882 if (GET_CODE (x) == PLUS)
61dd226f
NF
3883 {
3884 rtx op1 = XEXP (x, 0);
3885 rtx op2 = XEXP (x, 1);
3886 rtx y;
3887
3888 op1 = force_reg (Pmode, op1);
3889
3890 if (GET_CODE (op2) != REG
3891 && (GET_CODE (op2) != CONST_INT
3892 || !SPE_CONST_OFFSET_OK (INTVAL (op2))
3893 || (GET_MODE_SIZE (mode) > 8
3894 && !SPE_CONST_OFFSET_OK (INTVAL (op2) + 8))))
3895 op2 = force_reg (Pmode, op2);
3896
3897 /* We can't always do [reg + reg] for these, because [reg +
3898 reg + offset] is not a legitimate addressing mode. */
3899 y = gen_rtx_PLUS (Pmode, op1, op2);
3900
4f011e1e 3901 if ((GET_MODE_SIZE (mode) > 8 || mode == DDmode) && REG_P (op2))
61dd226f
NF
3902 return force_reg (Pmode, y);
3903 else
3904 return y;
3905 }
a3170dc6
AH
3906
3907 return force_reg (Pmode, x);
3908 }
f1384257
AM
3909 else if (TARGET_ELF
3910 && TARGET_32BIT
3911 && TARGET_NO_TOC
3912 && ! flag_pic
9ebbca7d 3913 && GET_CODE (x) != CONST_INT
f676971a 3914 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3915 && CONSTANT_P (x)
6ac7bf2c
GK
3916 && GET_MODE_NUNITS (mode) == 1
3917 && (GET_MODE_BITSIZE (mode) <= 32
696e45ba 3918 || ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
4d4447b5 3919 && (mode == DFmode || mode == DDmode))))
9ebbca7d
GK
3920 {
3921 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3922 emit_insn (gen_elf_high (reg, x));
3923 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3924 }
ee890fe2
SS
3925 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3926 && ! flag_pic
ab82a49f
AP
3927#if TARGET_MACHO
3928 && ! MACHO_DYNAMIC_NO_PIC_P
3929#endif
ee890fe2 3930 && GET_CODE (x) != CONST_INT
f676971a 3931 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3932 && CONSTANT_P (x)
506a7bc8 3933 && GET_MODE_NUNITS (mode) == 1
696e45ba 3934 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
4d4447b5 3935 || (mode != DFmode && mode != DDmode))
f676971a 3936 && mode != DImode
ee890fe2
SS
3937 && mode != TImode)
3938 {
3939 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3940 emit_insn (gen_macho_high (reg, x));
3941 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3942 }
f676971a 3943 else if (TARGET_TOC
0cdc04e8 3944 && GET_CODE (x) == SYMBOL_REF
4d588c14 3945 && constant_pool_expr_p (x)
a9098fd0 3946 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3947 {
3948 return create_TOC_reference (x);
3949 }
3950 else
506d7b68 3951 return x;
9ebbca7d 3952}
258bfae2 3953
fdbe66f2 3954/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3955 We need to emit DTP-relative relocations. */
3956
fdbe66f2 3957static void
c973d557
JJ
3958rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3959{
3960 switch (size)
3961 {
3962 case 4:
3963 fputs ("\t.long\t", file);
3964 break;
3965 case 8:
3966 fputs (DOUBLE_INT_ASM_OP, file);
3967 break;
3968 default:
37409796 3969 gcc_unreachable ();
c973d557
JJ
3970 }
3971 output_addr_const (file, x);
3972 fputs ("@dtprel+0x8000", file);
3973}
3974
c4501e62
JJ
3975/* Construct the SYMBOL_REF for the tls_get_addr function. */
3976
3977static GTY(()) rtx rs6000_tls_symbol;
3978static rtx
863d938c 3979rs6000_tls_get_addr (void)
c4501e62
JJ
3980{
3981 if (!rs6000_tls_symbol)
3982 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3983
3984 return rs6000_tls_symbol;
3985}
3986
3987/* Construct the SYMBOL_REF for TLS GOT references. */
3988
3989static GTY(()) rtx rs6000_got_symbol;
3990static rtx
863d938c 3991rs6000_got_sym (void)
c4501e62
JJ
3992{
3993 if (!rs6000_got_symbol)
3994 {
3995 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3996 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3997 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3998 }
c4501e62
JJ
3999
4000 return rs6000_got_symbol;
4001}
4002
4003/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
4004 this (thread-local) address. */
4005
4006static rtx
a2369ed3 4007rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
4008{
4009 rtx dest, insn;
4010
4011 dest = gen_reg_rtx (Pmode);
4012 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
4013 {
4014 rtx tlsreg;
4015
4016 if (TARGET_64BIT)
4017 {
4018 tlsreg = gen_rtx_REG (Pmode, 13);
4019 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
4020 }
4021 else
4022 {
4023 tlsreg = gen_rtx_REG (Pmode, 2);
4024 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
4025 }
4026 emit_insn (insn);
4027 }
4028 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
4029 {
4030 rtx tlsreg, tmp;
4031
4032 tmp = gen_reg_rtx (Pmode);
4033 if (TARGET_64BIT)
4034 {
4035 tlsreg = gen_rtx_REG (Pmode, 13);
4036 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
4037 }
4038 else
4039 {
4040 tlsreg = gen_rtx_REG (Pmode, 2);
4041 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
4042 }
4043 emit_insn (insn);
4044 if (TARGET_64BIT)
4045 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
4046 else
4047 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
4048 emit_insn (insn);
4049 }
4050 else
4051 {
4052 rtx r3, got, tga, tmp1, tmp2, eqv;
4053
4fed8f8f
AM
4054 /* We currently use relocations like @got@tlsgd for tls, which
4055 means the linker will handle allocation of tls entries, placing
4056 them in the .got section. So use a pointer to the .got section,
4057 not one to secondary TOC sections used by 64-bit -mminimal-toc,
4058 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 4059 if (TARGET_64BIT)
972f427b 4060 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
4061 else
4062 {
4063 if (flag_pic == 1)
4064 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
4065 else
4066 {
4067 rtx gsym = rs6000_got_sym ();
4068 got = gen_reg_rtx (Pmode);
4069 if (flag_pic == 0)
4070 rs6000_emit_move (got, gsym, Pmode);
4071 else
4072 {
e65a3857 4073 rtx tmp3, mem;
c4501e62
JJ
4074 rtx first, last;
4075
c4501e62
JJ
4076 tmp1 = gen_reg_rtx (Pmode);
4077 tmp2 = gen_reg_rtx (Pmode);
4078 tmp3 = gen_reg_rtx (Pmode);
542a8afa 4079 mem = gen_const_mem (Pmode, tmp1);
c4501e62 4080
e65a3857
DE
4081 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
4082 emit_move_insn (tmp1,
1de43f85 4083 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
4084 emit_move_insn (tmp2, mem);
4085 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
4086 last = emit_move_insn (got, tmp3);
bd94cb6e 4087 set_unique_reg_note (last, REG_EQUAL, gsym);
c4501e62
JJ
4088 }
4089 }
4090 }
4091
4092 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
4093 {
4094 r3 = gen_rtx_REG (Pmode, 3);
02135bc1
SB
4095 tga = rs6000_tls_get_addr ();
4096
4097 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4098 insn = gen_tls_gd_aix64 (r3, got, addr, tga, const0_rtx);
4099 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
4100 insn = gen_tls_gd_aix32 (r3, got, addr, tga, const0_rtx);
4101 else if (DEFAULT_ABI == ABI_V4)
4102 insn = gen_tls_gd_sysvsi (r3, got, addr, tga, const0_rtx);
c4501e62 4103 else
02135bc1
SB
4104 gcc_unreachable ();
4105
c4501e62 4106 start_sequence ();
c4501e62 4107 insn = emit_call_insn (insn);
becfd6e5 4108 RTL_CONST_CALL_P (insn) = 1;
c4501e62 4109 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
d161cb2d
AM
4110 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
4111 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
c4501e62
JJ
4112 insn = get_insns ();
4113 end_sequence ();
4114 emit_libcall_block (insn, dest, r3, addr);
4115 }
4116 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
4117 {
4118 r3 = gen_rtx_REG (Pmode, 3);
02135bc1
SB
4119 tga = rs6000_tls_get_addr ();
4120
4121 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4122 insn = gen_tls_ld_aix64 (r3, got, tga, const0_rtx);
4123 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
4124 insn = gen_tls_ld_aix32 (r3, got, tga, const0_rtx);
4125 else if (DEFAULT_ABI == ABI_V4)
4126 insn = gen_tls_ld_sysvsi (r3, got, tga, const0_rtx);
c4501e62 4127 else
02135bc1
SB
4128 gcc_unreachable ();
4129
c4501e62 4130 start_sequence ();
c4501e62 4131 insn = emit_call_insn (insn);
becfd6e5 4132 RTL_CONST_CALL_P (insn) = 1;
c4501e62 4133 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
d161cb2d
AM
4134 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
4135 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
c4501e62
JJ
4136 insn = get_insns ();
4137 end_sequence ();
4138 tmp1 = gen_reg_rtx (Pmode);
4139 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
4140 UNSPEC_TLSLD);
4141 emit_libcall_block (insn, tmp1, r3, eqv);
4142 if (rs6000_tls_size == 16)
4143 {
4144 if (TARGET_64BIT)
4145 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
4146 else
4147 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
4148 }
4149 else if (rs6000_tls_size == 32)
4150 {
4151 tmp2 = gen_reg_rtx (Pmode);
4152 if (TARGET_64BIT)
4153 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
4154 else
4155 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
4156 emit_insn (insn);
4157 if (TARGET_64BIT)
4158 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
4159 else
4160 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
4161 }
4162 else
4163 {
4164 tmp2 = gen_reg_rtx (Pmode);
4165 if (TARGET_64BIT)
4166 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
4167 else
4168 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
4169 emit_insn (insn);
4170 insn = gen_rtx_SET (Pmode, dest,
4171 gen_rtx_PLUS (Pmode, tmp2, tmp1));
4172 }
4173 emit_insn (insn);
4174 }
4175 else
4176 {
a7b376ee 4177 /* IE, or 64-bit offset LE. */
c4501e62
JJ
4178 tmp2 = gen_reg_rtx (Pmode);
4179 if (TARGET_64BIT)
4180 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
4181 else
4182 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
4183 emit_insn (insn);
4184 if (TARGET_64BIT)
4185 insn = gen_tls_tls_64 (dest, tmp2, addr);
4186 else
4187 insn = gen_tls_tls_32 (dest, tmp2, addr);
4188 emit_insn (insn);
4189 }
4190 }
4191
4192 return dest;
4193}
4194
c4501e62
JJ
4195/* Return 1 if X contains a thread-local symbol. */
4196
4197bool
a2369ed3 4198rs6000_tls_referenced_p (rtx x)
c4501e62 4199{
cd413cab
AP
4200 if (! TARGET_HAVE_TLS)
4201 return false;
4202
c4501e62
JJ
4203 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
4204}
4205
4206/* Return 1 if *X is a thread-local symbol. This is the same as
4207 rs6000_tls_symbol_ref except for the type of the unused argument. */
4208
9390387d 4209static int
a2369ed3 4210rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
4211{
4212 return RS6000_SYMBOL_REF_TLS_P (*x);
4213}
4214
24ea750e
DJ
4215/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
4216 replace the input X, or the original X if no replacement is called for.
4217 The output parameter *WIN is 1 if the calling macro should goto WIN,
4218 0 if it should not.
4219
4220 For RS/6000, we wish to handle large displacements off a base
4221 register by splitting the addend across an addi/addis and the mem insn.
4222 This cuts the number of extra insns needed from 3 to 1.
4223
4224 On Darwin, we use this to generate code for floating point constants.
4225 A movsf_low is generated so we wind up with 2 instructions rather than 3.
08a6a74b
RS
4226 The Darwin code is inside #if TARGET_MACHO because only then are the
4227 machopic_* functions defined. */
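/* Illustrative sketch (added for exposition; the helper name is
   hypothetical): the PLUS (reg, const_int) case below reuses the
   sign-extended low/high split but recomputes HIGH through a 32-bit
   window, so "high + low != val" is the guard that rejects
   displacements which do not fit in 32 bits.  */
static int
reload_split_offset (long long val, long long *high, long long *low)
{
  *low = ((val & 0xffff) ^ 0x8000) - 0x8000;
  *high = (((val - *low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

  /* Nonzero when the split is usable, i.e. no 32-bit overflow.  */
  return *high + *low == val;
}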
24ea750e 4228rtx
f676971a 4229rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
4230 int opnum, int type,
4231 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 4232{
f676971a 4233 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
4234 if (GET_CODE (x) == PLUS
4235 && GET_CODE (XEXP (x, 0)) == PLUS
4236 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4237 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4238 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4239 {
4240 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4241 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4242 opnum, (enum reload_type)type);
24ea750e
DJ
4243 *win = 1;
4244 return x;
4245 }
3deb2758 4246
24ea750e
DJ
4247#if TARGET_MACHO
4248 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
4249 && GET_CODE (x) == LO_SUM
4250 && GET_CODE (XEXP (x, 0)) == PLUS
4251 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
4252 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
24ea750e 4253 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
08a6a74b 4254 && machopic_operand_p (XEXP (x, 1)))
24ea750e
DJ
4255 {
4256 /* Result of previous invocation of this function on Darwin
6f317ef3 4257 floating point constant. */
24ea750e 4258 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4259 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4260 opnum, (enum reload_type)type);
24ea750e
DJ
4261 *win = 1;
4262 return x;
4263 }
4264#endif
4937d02d
DE
4265
4266 /* Force ld/std non-word aligned offset into base register by wrapping
4267 in offset 0. */
4268 if (GET_CODE (x) == PLUS
4269 && GET_CODE (XEXP (x, 0)) == REG
4270 && REGNO (XEXP (x, 0)) < 32
c6c3dba9 4271 && INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 1)
4937d02d
DE
4272 && GET_CODE (XEXP (x, 1)) == CONST_INT
4273 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 4274 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
4275 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
4276 && TARGET_POWERPC64)
4277 {
4278 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
4279 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4280 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4281 opnum, (enum reload_type) type);
4282 *win = 1;
4283 return x;
4284 }
4285
24ea750e
DJ
4286 if (GET_CODE (x) == PLUS
4287 && GET_CODE (XEXP (x, 0)) == REG
4288 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
c6c3dba9 4289 && INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 1)
78c875e8 4290 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 4291 && !SPE_VECTOR_MODE (mode)
17caeff2 4292 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 4293 || mode == DDmode || mode == TDmode
54b695e7 4294 || mode == DImode))
78c875e8 4295 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
4296 {
4297 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
4298 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
4299 HOST_WIDE_INT high
c4ad648e 4300 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
4301
4302 /* Check for 32-bit overflow. */
4303 if (high + low != val)
c4ad648e 4304 {
24ea750e
DJ
4305 *win = 0;
4306 return x;
4307 }
4308
4309 /* Reload the high part into a base reg; leave the low part
c4ad648e 4310 in the mem directly. */
24ea750e
DJ
4311
4312 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
4313 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4314 GEN_INT (high)),
4315 GEN_INT (low));
24ea750e
DJ
4316
4317 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4318 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4319 opnum, (enum reload_type)type);
24ea750e
DJ
4320 *win = 1;
4321 return x;
4322 }
4937d02d 4323
24ea750e 4324 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 4325 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 4326 && !SPE_VECTOR_MODE (mode)
8308679f
DE
4327#if TARGET_MACHO
4328 && DEFAULT_ABI == ABI_DARWIN
a29077da 4329 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
4330#else
4331 && DEFAULT_ABI == ABI_V4
4332 && !flag_pic
4333#endif
7393f7f8 4334 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
4d4447b5 4335 The same goes for DImode without 64-bit gprs and DFmode and DDmode
7b5d92b2 4336 without fprs. */
0d8c1c97 4337 && mode != TFmode
7393f7f8 4338 && mode != TDmode
7b5d92b2 4339 && (mode != DImode || TARGET_POWERPC64)
4d4447b5 4340 && ((mode != DFmode && mode != DDmode) || TARGET_POWERPC64
696e45ba 4341 || (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)))
24ea750e 4342 {
8308679f 4343#if TARGET_MACHO
a29077da
GK
4344 if (flag_pic)
4345 {
08a6a74b 4346 rtx offset = machopic_gen_offset (x);
a29077da
GK
4347 x = gen_rtx_LO_SUM (GET_MODE (x),
4348 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4349 gen_rtx_HIGH (Pmode, offset)), offset);
4350 }
4351 else
8308679f 4352#endif
a29077da 4353 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 4354 gen_rtx_HIGH (Pmode, x), x);
a29077da 4355
24ea750e 4356 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
4357 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4358 opnum, (enum reload_type)type);
24ea750e
DJ
4359 *win = 1;
4360 return x;
4361 }
4937d02d 4362
dec1f3aa
DE
4363 /* Reload an offset address wrapped by an AND that represents the
4364 masking of the lower bits. Strip the outer AND and let reload
4365 convert the offset address into an indirect address. */
4366 if (TARGET_ALTIVEC
4367 && ALTIVEC_VECTOR_MODE (mode)
4368 && GET_CODE (x) == AND
4369 && GET_CODE (XEXP (x, 0)) == PLUS
4370 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4371 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4372 && GET_CODE (XEXP (x, 1)) == CONST_INT
4373 && INTVAL (XEXP (x, 1)) == -16)
4374 {
4375 x = XEXP (x, 0);
4376 *win = 1;
4377 return x;
4378 }
4379
24ea750e 4380 if (TARGET_TOC
0cdc04e8 4381 && GET_CODE (x) == SYMBOL_REF
4d588c14 4382 && constant_pool_expr_p (x)
c1f11548 4383 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 4384 {
194c524a 4385 x = create_TOC_reference (x);
24ea750e
DJ
4386 *win = 1;
4387 return x;
4388 }
4389 *win = 0;
4390 return x;
f676971a 4391}
24ea750e 4392
331d9186 4393/* TARGET_LEGITIMATE_ADDRESS_P recognizes an RTL expression
258bfae2
FS
4394 that is a valid memory address for an instruction.
4395 The MODE argument is the machine mode for the MEM expression
4396 that wants to use this address.
4397
4398 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
4399 refers to a constant pool entry of an address (or the sum of it
4400 plus a constant), a short (16-bit signed) constant plus a register,
4401 the sum of two registers, or a register indirect, possibly with an
4d4447b5
PB
4402 auto-increment. For DFmode, DDmode and DImode with a constant plus
4403 register, we must ensure that both words are addressable or PowerPC64
4404 with offset word aligned.
258bfae2 4405
4d4447b5 4406 For modes spanning multiple registers (DFmode and DDmode in 32-bit GPRs,
7393f7f8
BE
4407 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4408 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2 4409 during assembly output. */
c6c3dba9
PB
4410bool
4411rs6000_legitimate_address_p (enum machine_mode mode, rtx x, bool reg_ok_strict)
258bfae2 4412{
850e8d3d
DN
4413 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4414 if (TARGET_ALTIVEC
4415 && ALTIVEC_VECTOR_MODE (mode)
4416 && GET_CODE (x) == AND
4417 && GET_CODE (XEXP (x, 1)) == CONST_INT
4418 && INTVAL (XEXP (x, 1)) == -16)
4419 x = XEXP (x, 0);
4420
c4501e62
JJ
4421 if (RS6000_SYMBOL_REF_TLS_P (x))
4422 return 0;
4d588c14 4423 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
4424 return 1;
4425 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4426 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4427 && !SPE_VECTOR_MODE (mode)
429ec7dc 4428 && mode != TFmode
7393f7f8 4429 && mode != TDmode
54b695e7 4430 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4431 && !(TARGET_E500_DOUBLE
4432 && (mode == DFmode || mode == DDmode || mode == DImode))
258bfae2 4433 && TARGET_UPDATE
4d588c14 4434 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4435 return 1;
d04b6e6e 4436 if (legitimate_small_data_p (mode, x))
258bfae2 4437 return 1;
4d588c14 4438 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
4439 return 1;
4440 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4441 if (! reg_ok_strict
4442 && GET_CODE (x) == PLUS
4443 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4444 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4445 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
4446 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4447 return 1;
76d2b81d 4448 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4449 return 1;
4450 if (mode != TImode
76d2b81d 4451 && mode != TFmode
7393f7f8 4452 && mode != TDmode
960c5c79 4453 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
a3170dc6 4454 || TARGET_POWERPC64
4f011e1e
JM
4455 || (mode != DFmode && mode != DDmode)
4456 || (TARGET_E500_DOUBLE && mode != DDmode))
258bfae2 4457 && (TARGET_POWERPC64 || mode != DImode)
001b9eb6 4458 && !avoiding_indexed_address_p (mode)
4d588c14 4459 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4460 return 1;
6fb5fa3c
DB
4461 if (GET_CODE (x) == PRE_MODIFY
4462 && mode != TImode
4463 && mode != TFmode
4464 && mode != TDmode
696e45ba 4465 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
6fb5fa3c 4466 || TARGET_POWERPC64
4d4447b5 4467 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
6fb5fa3c
DB
4468 && (TARGET_POWERPC64 || mode != DImode)
4469 && !ALTIVEC_VECTOR_MODE (mode)
4470 && !SPE_VECTOR_MODE (mode)
4471 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4472 && !(TARGET_E500_DOUBLE
4473 && (mode == DFmode || mode == DDmode || mode == DImode))
6fb5fa3c
DB
4474 && TARGET_UPDATE
4475 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4476 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
001b9eb6
PH
4477 || (!avoiding_indexed_address_p (mode)
4478 && legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict)))
6fb5fa3c
DB
4479 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4480 return 1;
4d588c14 4481 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4482 return 1;
4483 return 0;
4484}
4d588c14
RH
4485
4486/* Return true if ADDR (a legitimate address expression)
4487 has an effect that depends on the machine mode it is used for.
4488
4489 On the RS/6000 this is true of all integral offsets (since AltiVec
4490 modes don't allow them) and of pre-increment or decrement addresses.
4491
4492 ??? Except that due to conceptual problems in offsettable_address_p
4493 we can't really report the problems of integral offsets. So leave
f676971a 4494 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4495 sub-words of a TFmode operand, which is what we had before. */
4496
4497bool
a2369ed3 4498rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4499{
4500 switch (GET_CODE (addr))
4501 {
4502 case PLUS:
4503 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4504 {
4505 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4506 return val + 12 + 0x8000 >= 0x10000;
4507 }
4508 break;
4509
4510 case LO_SUM:
4511 return true;
4512
619fe064 4513 /* Auto-increment cases are now treated generically in recog.c. */
6fb5fa3c
DB
4514 case PRE_MODIFY:
4515 return TARGET_UPDATE;
4d588c14
RH
4516
4517 default:
4518 break;
4519 }
4520
4521 return false;
4522}
d8ecbcdb 4523
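/* Worked note (added for exposition): in the PLUS case above,
   VAL + 12 + 0x8000 >= 0x10000 is the biased form of "VAL + 12 falls
   outside the signed 16-bit displacement range"; the 12 allows for the
   offset of the last word of a 16-byte (TFmode-sized) operand, as the
   ??? comment above explains.  */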
944258eb
RS
4524/* Implement FIND_BASE_TERM. */
4525
4526rtx
4527rs6000_find_base_term (rtx op)
4528{
4529 rtx base, offset;
4530
4531 split_const (op, &base, &offset);
4532 if (GET_CODE (base) == UNSPEC)
4533 switch (XINT (base, 1))
4534 {
4535 case UNSPEC_TOCREL:
4536 case UNSPEC_MACHOPIC_OFFSET:
4537 /* OP represents SYM [+ OFFSET] - ANCHOR. SYM is the base term
4538 for aliasing purposes. */
4539 return XVECEXP (base, 0, 0);
4540 }
4541
4542 return op;
4543}
4544
d04b6e6e
EB
4545/* More elaborate version of recog's offsettable_memref_p predicate
4546 that works around the ??? note of rs6000_mode_dependent_address.
4547 In particular it accepts
4548
4549 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4550
4551 in 32-bit mode, that the recog predicate rejects. */
4552
4553bool
4554rs6000_offsettable_memref_p (rtx op)
4555{
4556 if (!MEM_P (op))
4557 return false;
4558
4559 /* First mimic offsettable_memref_p. */
4560 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4561 return true;
4562
4563 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4564 the latter predicate knows nothing about the mode of the memory
4565 reference and, therefore, assumes that it is the largest supported
4566 mode (TFmode). As a consequence, legitimate offsettable memory
4567 references are rejected. rs6000_legitimate_offset_address_p contains
4568 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4569 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4570}
4571
d8ecbcdb
AH
4572/* Return number of consecutive hard regs needed starting at reg REGNO
4573 to hold something of mode MODE.
4574 This is ordinarily the length in words of a value of mode MODE
4575 but can be less for certain modes in special long registers.
4576
4577 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4578 scalar instructions. The upper 32 bits are only available to the
4579 SIMD instructions.
4580
4581 POWER and PowerPC GPRs hold 32 bits worth;
4582 PowerPC64 GPRs and FPRs hold 64 bits worth. */
4583
4584int
4585rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4586{
4587 if (FP_REGNO_P (regno))
4588 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4589
4590 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4591 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4592
4593 if (ALTIVEC_REGNO_P (regno))
4594 return
4595 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4596
8521c414
JM
4597 /* The value returned for SCmode in the E500 double case is 2 for
4598 ABI compatibility; storing an SCmode value in a single register
4599 would require function_arg and rs6000_spe_function_arg to handle
4600 SCmode so as to pass the value correctly in a pair of
4601 registers. */
4f011e1e
JM
4602 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode
4603 && !DECIMAL_FLOAT_MODE_P (mode))
8521c414
JM
4604 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4605
d8ecbcdb
AH
4606 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4607}
4608
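/* Worked example (added for exposition; the helper name is
   hypothetical): the expressions above are ceiling divisions, e.g. an
   8-byte DFmode value occupies (8 + 4 - 1) / 4 = 2 32-bit GPRs but
   (8 + 8 - 1) / 8 = 1 FPR.  The general form:  */
static int
regs_needed (int mode_size, int reg_size)
{
  return (mode_size + reg_size - 1) / reg_size;	/* ceil (mode_size / reg_size) */
}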
4609/* Change register usage conditional on target flags. */
4610void
4611rs6000_conditional_register_usage (void)
4612{
4613 int i;
4614
4615 /* Set MQ register fixed (already call_used) if not POWER
4616 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4617 be allocated. */
4618 if (! TARGET_POWER)
4619 fixed_regs[64] = 1;
4620
7c9ac5c0 4621 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4622 if (TARGET_64BIT)
4623 fixed_regs[13] = call_used_regs[13]
4624 = call_really_used_regs[13] = 1;
4625
4626 /* Conditionally disable FPRs. */
4627 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4628 for (i = 32; i < 64; i++)
4629 fixed_regs[i] = call_used_regs[i]
c4ad648e 4630 = call_really_used_regs[i] = 1;
2aa4498c 4631
7c9ac5c0
PH
4632 /* The TOC register is not killed across calls in a way that is
4633 visible to the compiler. */
4634 if (DEFAULT_ABI == ABI_AIX)
4635 call_really_used_regs[2] = 0;
4636
2aa4498c
AH
4637 if (DEFAULT_ABI == ABI_V4
4638 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4639 && flag_pic == 2)
4640 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4641
4642 if (DEFAULT_ABI == ABI_V4
4643 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4644 && flag_pic == 1)
4645 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4646 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4647 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4648
4649 if (DEFAULT_ABI == ABI_DARWIN
4650 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4651 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4652 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4653 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4654
b4db40bf
JJ
4655 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4656 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4657 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4658
2aa4498c
AH
4659 if (TARGET_SPE)
4660 {
4661 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4662 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4663 registers in prologues and epilogues. We no longer use r14
4664 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4665 pool for link-compatibility with older versions of GCC. Once
4666 "old" code has died out, we can return r14 to the allocation
4667 pool. */
4668 fixed_regs[14]
4669 = call_used_regs[14]
4670 = call_really_used_regs[14] = 1;
2aa4498c
AH
4671 }
4672
0db747be 4673 if (!TARGET_ALTIVEC)
2aa4498c
AH
4674 {
4675 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4676 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4677 call_really_used_regs[VRSAVE_REGNO] = 1;
4678 }
4679
0db747be
DE
4680 if (TARGET_ALTIVEC)
4681 global_regs[VSCR_REGNO] = 1;
4682
2aa4498c 4683 if (TARGET_ALTIVEC_ABI)
0db747be
DE
4684 {
4685 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4686 call_used_regs[i] = call_really_used_regs[i] = 1;
4687
4688 /* AIX reserves VR20:31 in non-extended ABI mode. */
4689 if (TARGET_XCOFF)
4690 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
4691 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4692 }
2aa4498c 4693}
fb4d4348 4694\f
a4f6c312
SS
4695/* Try to output insns to set TARGET equal to the constant C if it can
4696 be done in less than N insns. Do all computations in MODE.
4697 Returns the place where the output has been placed if it can be
4698 done and the insns have been emitted. If it would take more than N
4699 insns, zero is returned and no insns are emitted. */
2bfcf297
DB
4700
4701rtx
f676971a 4702rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4703 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4704{
af8cb5c5 4705 rtx result, insn, set;
2bfcf297
DB
4706 HOST_WIDE_INT c0, c1;
4707
37409796 4708 switch (mode)
2bfcf297 4709 {
37409796
NS
4710 case QImode:
4711 case HImode:
2bfcf297 4712 if (dest == NULL)
c4ad648e 4713 dest = gen_reg_rtx (mode);
2bfcf297
DB
4714 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4715 return dest;
bb8df8a6 4716
37409796 4717 case SImode:
b3a13419 4718 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4719
d448860e 4720 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4721 GEN_INT (INTVAL (source)
4722 & (~ (HOST_WIDE_INT) 0xffff))));
4723 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4724 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4725 GEN_INT (INTVAL (source) & 0xffff))));
4726 result = dest;
37409796
NS
4727 break;
4728
4729 case DImode:
4730 switch (GET_CODE (source))
af8cb5c5 4731 {
37409796 4732 case CONST_INT:
af8cb5c5
DE
4733 c0 = INTVAL (source);
4734 c1 = -(c0 < 0);
37409796 4735 break;
bb8df8a6 4736
37409796 4737 case CONST_DOUBLE:
2bfcf297 4738#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4739 c0 = CONST_DOUBLE_LOW (source);
4740 c1 = -(c0 < 0);
2bfcf297 4741#else
af8cb5c5
DE
4742 c0 = CONST_DOUBLE_LOW (source);
4743 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4744#endif
37409796
NS
4745 break;
4746
4747 default:
4748 gcc_unreachable ();
af8cb5c5 4749 }
af8cb5c5
DE
4750
4751 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4752 break;
4753
4754 default:
4755 gcc_unreachable ();
2bfcf297 4756 }
2bfcf297 4757
af8cb5c5
DE
4758 insn = get_last_insn ();
4759 set = single_set (insn);
4760 if (! CONSTANT_P (SET_SRC (set)))
4761 set_unique_reg_note (insn, REG_EQUAL, source);
4762
4763 return result;
2bfcf297
DB
4764}
4765
4766/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4767 fall back to a straightforward decomposition. We do this to avoid
4768 exponential run times encountered when looking for longer sequences
4769 with rs6000_emit_set_const. */
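/* Sketch of the decomposition below, assuming HOST_WIDE_INT is a full
   64 bits so that the whole constant fits in C1: it is split into
   four 16-bit chunks,

     ud1 = c & 0xffff;           ud2 = (c >> 16) & 0xffff;
     ud3 = (c >> 32) & 0xffff;   ud4 = (c >> 48) & 0xffff;

   and in the general case rebuilt by setting the high halfword,
   IORing in ud3, shifting left by 32, and IORing in ud2 << 16 and ud1
   -- the usual lis/ori/sldi/oris/ori sequence.  Shorter sequences are
   emitted when the upper chunks are all zeros or all ones.  */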
4770static rtx
a2369ed3 4771rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4772{
4773 if (!TARGET_POWERPC64)
4774 {
4775 rtx operand1, operand2;
4776
4777 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4778 DImode);
d448860e 4779 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4780 DImode);
4781 emit_move_insn (operand1, GEN_INT (c1));
4782 emit_move_insn (operand2, GEN_INT (c2));
4783 }
4784 else
4785 {
bc06712d 4786 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4787
bc06712d 4788 ud1 = c1 & 0xffff;
f921c9c9 4789 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4790#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4791 c2 = c1 >> 32;
2bfcf297 4792#endif
bc06712d 4793 ud3 = c2 & 0xffff;
f921c9c9 4794 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4795
f676971a 4796 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4797 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4798 {
bc06712d 4799 if (ud1 & 0x8000)
b78d48dd 4800 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4801 else
4802 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4803 }
2bfcf297 4804
f676971a 4805 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4806 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4807 {
bc06712d 4808 if (ud2 & 0x8000)
f676971a 4809 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4810 - 0x80000000));
252b88f7 4811 else
bc06712d
TR
4812 emit_move_insn (dest, GEN_INT (ud2 << 16));
4813 if (ud1 != 0)
d448860e
JH
4814 emit_move_insn (copy_rtx (dest),
4815 gen_rtx_IOR (DImode, copy_rtx (dest),
4816 GEN_INT (ud1)));
252b88f7 4817 }
f676971a 4818 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4819 || (ud4 == 0 && ! (ud3 & 0x8000)))
4820 {
4821 if (ud3 & 0x8000)
f676971a 4822 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4823 - 0x80000000));
4824 else
4825 emit_move_insn (dest, GEN_INT (ud3 << 16));
4826
4827 if (ud2 != 0)
d448860e
JH
4828 emit_move_insn (copy_rtx (dest),
4829 gen_rtx_IOR (DImode, copy_rtx (dest),
4830 GEN_INT (ud2)));
4831 emit_move_insn (copy_rtx (dest),
4832 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4833 GEN_INT (16)));
bc06712d 4834 if (ud1 != 0)
d448860e
JH
4835 emit_move_insn (copy_rtx (dest),
4836 gen_rtx_IOR (DImode, copy_rtx (dest),
4837 GEN_INT (ud1)));
bc06712d 4838 }
f676971a 4839 else
bc06712d
TR
4840 {
4841 if (ud4 & 0x8000)
f676971a 4842 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4843 - 0x80000000));
4844 else
4845 emit_move_insn (dest, GEN_INT (ud4 << 16));
4846
4847 if (ud3 != 0)
d448860e
JH
4848 emit_move_insn (copy_rtx (dest),
4849 gen_rtx_IOR (DImode, copy_rtx (dest),
4850 GEN_INT (ud3)));
2bfcf297 4851
d448860e
JH
4852 emit_move_insn (copy_rtx (dest),
4853 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4854 GEN_INT (32)));
bc06712d 4855 if (ud2 != 0)
d448860e
JH
4856 emit_move_insn (copy_rtx (dest),
4857 gen_rtx_IOR (DImode, copy_rtx (dest),
4858 GEN_INT (ud2 << 16)));
bc06712d 4859 if (ud1 != 0)
d448860e
JH
4860 emit_move_insn (copy_rtx (dest),
4861 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4862 }
4863 }
2bfcf297
DB
4864 return dest;
4865}
4866
76d2b81d 4867/* Helper for rs6000_emit_move below. Get rid of [r+r] memory refs
7393f7f8 4868 in cases where it won't work (TImode, TFmode, TDmode). */
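/* For example, a TImode store through (mem (plus (reg 3) (reg 4)))
   is rewritten here so that the sum is first copied into a fresh
   pseudo and the operand becomes (mem (reg TMP)), an address form the
   TImode/TFmode/TDmode move patterns do handle.  The register numbers
   are illustrative only.  */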
76d2b81d
DJ
4869
4870static void
4871rs6000_eliminate_indexed_memrefs (rtx operands[2])
4872{
4873 if (GET_CODE (operands[0]) == MEM
4874 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4875 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4876 && ! reload_in_progress)
4877 operands[0]
4878 = replace_equiv_address (operands[0],
4879 copy_addr_to_reg (XEXP (operands[0], 0)));
4880
4881 if (GET_CODE (operands[1]) == MEM
4882 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4883 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4884 && ! reload_in_progress)
4885 operands[1]
4886 = replace_equiv_address (operands[1],
4887 copy_addr_to_reg (XEXP (operands[1], 0)));
4888}
4889
fb4d4348
GK
4890/* Emit a move from SOURCE to DEST in mode MODE. */
4891void
a2369ed3 4892rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4893{
4894 rtx operands[2];
4895 operands[0] = dest;
4896 operands[1] = source;
f676971a 4897
fb4d4348
GK
4898 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4899 if (GET_CODE (operands[1]) == CONST_DOUBLE
4900 && ! FLOAT_MODE_P (mode)
4901 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4902 {
4903 /* FIXME. This should never happen. */
4904 /* Since it seems that it does, do the safe thing and convert
4905 to a CONST_INT. */
2496c7bd 4906 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4907 }
37409796
NS
4908 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4909 || FLOAT_MODE_P (mode)
4910 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4911 || CONST_DOUBLE_LOW (operands[1]) < 0)
4912 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4913 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4914
c9e8cb32
DD
4915 /* Check if GCC is setting up a block move that will end up using FP
4916 registers as temporaries. We must make sure this is acceptable. */
4917 if (GET_CODE (operands[0]) == MEM
4918 && GET_CODE (operands[1]) == MEM
4919 && mode == DImode
41543739
GK
4920 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4921 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4922 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4923 ? 32 : MEM_ALIGN (operands[0])))
4924 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4925 ? 32
41543739
GK
4926 : MEM_ALIGN (operands[1]))))
4927 && ! MEM_VOLATILE_P (operands [0])
4928 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4929 {
41543739
GK
4930 emit_move_insn (adjust_address (operands[0], SImode, 0),
4931 adjust_address (operands[1], SImode, 0));
d448860e
JH
4932 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4933 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4934 return;
4935 }
630d42a0 4936
b3a13419 4937 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4938 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4939 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4940
a3170dc6 4941 if (mode == SFmode && ! TARGET_POWERPC
696e45ba 4942 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
ffc14f31 4943 && GET_CODE (operands[0]) == MEM)
fb4d4348 4944 {
ffc14f31
GK
4945 int regnum;
4946
4947 if (reload_in_progress || reload_completed)
4948 regnum = true_regnum (operands[1]);
4949 else if (GET_CODE (operands[1]) == REG)
4950 regnum = REGNO (operands[1]);
4951 else
4952 regnum = -1;
f676971a 4953
fb4d4348
GK
4954 /* If operands[1] is a register, on POWER it may have
4955 double-precision data in it, so truncate it to single
4956 precision. */
4957 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4958 {
4959 rtx newreg;
b3a13419 4960 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4961 : gen_reg_rtx (mode));
fb4d4348
GK
4962 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4963 operands[1] = newreg;
4964 }
4965 }
4966
c4501e62
JJ
4967 /* Recognize the case where operand[1] is a reference to thread-local
4968 data and load its address to a register. */
84f52ebd 4969 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4970 {
84f52ebd
RH
4971 enum tls_model model;
4972 rtx tmp = operands[1];
4973 rtx addend = NULL;
4974
4975 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4976 {
4977 addend = XEXP (XEXP (tmp, 0), 1);
4978 tmp = XEXP (XEXP (tmp, 0), 0);
4979 }
4980
4981 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4982 model = SYMBOL_REF_TLS_MODEL (tmp);
4983 gcc_assert (model != 0);
4984
4985 tmp = rs6000_legitimize_tls_address (tmp, model);
4986 if (addend)
4987 {
4988 tmp = gen_rtx_PLUS (mode, tmp, addend);
4989 tmp = force_operand (tmp, operands[0]);
4990 }
4991 operands[1] = tmp;
c4501e62
JJ
4992 }
4993
8f4e6caf
RH
4994 /* Handle the case where reload calls us with an invalid address. */
4995 if (reload_in_progress && mode == Pmode
69ef87e2 4996 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4997 || ! nonimmediate_operand (operands[0], mode)))
4998 goto emit_set;
4999
a9baceb1
GK
5000 /* 128-bit constant floating-point values on Darwin should really be
5001 loaded as two parts. */
8521c414 5002 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
5003 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
5004 {
5005 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
5006 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
5007 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
5008 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
5009 simplify_gen_subreg (imode, operands[1], mode, 0),
5010 imode);
5011 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
5012 GET_MODE_SIZE (imode)),
5013 simplify_gen_subreg (imode, operands[1], mode,
5014 GET_MODE_SIZE (imode)),
5015 imode);
a9baceb1
GK
5016 return;
5017 }
5018
e41b2a33
PB
5019 if (reload_in_progress && cfun->machine->sdmode_stack_slot != NULL_RTX)
5020 cfun->machine->sdmode_stack_slot =
5021 eliminate_regs (cfun->machine->sdmode_stack_slot, VOIDmode, NULL_RTX);
5022
5023 if (reload_in_progress
5024 && mode == SDmode
5025 && MEM_P (operands[0])
5026 && rtx_equal_p (operands[0], cfun->machine->sdmode_stack_slot)
5027 && REG_P (operands[1]))
5028 {
5029 if (FP_REGNO_P (REGNO (operands[1])))
5030 {
5031 rtx mem = adjust_address_nv (operands[0], DDmode, 0);
5032 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5033 emit_insn (gen_movsd_store (mem, operands[1]));
5034 }
5035 else if (INT_REGNO_P (REGNO (operands[1])))
5036 {
5037 rtx mem = adjust_address_nv (operands[0], mode, 4);
5038 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5039 emit_insn (gen_movsd_hardfloat (mem, operands[1]));
5040 }
5041 else
5042 gcc_unreachable();
5043 return;
5044 }
5045 if (reload_in_progress
5046 && mode == SDmode
5047 && REG_P (operands[0])
5048 && MEM_P (operands[1])
5049 && rtx_equal_p (operands[1], cfun->machine->sdmode_stack_slot))
5050 {
5051 if (FP_REGNO_P (REGNO (operands[0])))
5052 {
5053 rtx mem = adjust_address_nv (operands[1], DDmode, 0);
5054 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5055 emit_insn (gen_movsd_load (operands[0], mem));
5056 }
5057 else if (INT_REGNO_P (REGNO (operands[0])))
5058 {
5059 rtx mem = adjust_address_nv (operands[1], mode, 4);
5060 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5061 emit_insn (gen_movsd_hardfloat (operands[0], mem));
5062 }
5063 else
5064 gcc_unreachable();
5065 return;
5066 }
5067
fb4d4348
GK
5068 /* FIXME: In the long term, this switch statement should go away
5069 and be replaced by a sequence of tests based on things like
5070 mode == Pmode. */
5071 switch (mode)
5072 {
5073 case HImode:
5074 case QImode:
5075 if (CONSTANT_P (operands[1])
5076 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 5077 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
5078 break;
5079
06f4e019 5080 case TFmode:
7393f7f8 5081 case TDmode:
76d2b81d
DJ
5082 rs6000_eliminate_indexed_memrefs (operands);
5083 /* fall through */
5084
fb4d4348 5085 case DFmode:
7393f7f8 5086 case DDmode:
fb4d4348 5087 case SFmode:
e41b2a33 5088 case SDmode:
f676971a 5089 if (CONSTANT_P (operands[1])
fb4d4348 5090 && ! easy_fp_constant (operands[1], mode))
a9098fd0 5091 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5092 break;
f676971a 5093
0ac081f6
AH
5094 case V16QImode:
5095 case V8HImode:
5096 case V4SFmode:
5097 case V4SImode:
a3170dc6
AH
5098 case V4HImode:
5099 case V2SFmode:
5100 case V2SImode:
00a892b8 5101 case V1DImode:
69ef87e2 5102 if (CONSTANT_P (operands[1])
d744e06e 5103 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
5104 operands[1] = force_const_mem (mode, operands[1]);
5105 break;
f676971a 5106
fb4d4348 5107 case SImode:
a9098fd0 5108 case DImode:
fb4d4348
GK
5109 /* Use the default pattern for the address of ELF small data. */
5110 if (TARGET_ELF
a9098fd0 5111 && mode == Pmode
f607bc57 5112 && DEFAULT_ABI == ABI_V4
f676971a 5113 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
5114 || GET_CODE (operands[1]) == CONST)
5115 && small_data_operand (operands[1], mode))
fb4d4348
GK
5116 {
5117 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5118 return;
5119 }
5120
f607bc57 5121 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
5122 && mode == Pmode && mode == SImode
5123 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
5124 {
5125 emit_insn (gen_movsi_got (operands[0], operands[1]));
5126 return;
5127 }
5128
ee890fe2 5129 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
5130 && TARGET_NO_TOC
5131 && ! flag_pic
a9098fd0 5132 && mode == Pmode
fb4d4348
GK
5133 && CONSTANT_P (operands[1])
5134 && GET_CODE (operands[1]) != HIGH
5135 && GET_CODE (operands[1]) != CONST_INT)
5136 {
b3a13419
ILT
5137 rtx target = (!can_create_pseudo_p ()
5138 ? operands[0]
5139 : gen_reg_rtx (mode));
fb4d4348
GK
5140
5141 /* If this is a function address on -mcall-aixdesc,
5142 convert it to the address of the descriptor. */
5143 if (DEFAULT_ABI == ABI_AIX
5144 && GET_CODE (operands[1]) == SYMBOL_REF
5145 && XSTR (operands[1], 0)[0] == '.')
5146 {
5147 const char *name = XSTR (operands[1], 0);
5148 rtx new_ref;
5149 while (*name == '.')
5150 name++;
5151 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
5152 CONSTANT_POOL_ADDRESS_P (new_ref)
5153 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 5154 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 5155 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 5156 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
5157 operands[1] = new_ref;
5158 }
7509c759 5159
ee890fe2
SS
5160 if (DEFAULT_ABI == ABI_DARWIN)
5161 {
ab82a49f
AP
5162#if TARGET_MACHO
5163 if (MACHO_DYNAMIC_NO_PIC_P)
5164 {
5165 /* Take care of any required data indirection. */
5166 operands[1] = rs6000_machopic_legitimize_pic_address (
5167 operands[1], mode, operands[0]);
5168 if (operands[0] != operands[1])
5169 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 5170 operands[0], operands[1]));
ab82a49f
AP
5171 return;
5172 }
5173#endif
b8a55285
AP
5174 emit_insn (gen_macho_high (target, operands[1]));
5175 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
5176 return;
5177 }
5178
fb4d4348
GK
5179 emit_insn (gen_elf_high (target, operands[1]));
5180 emit_insn (gen_elf_low (operands[0], target, operands[1]));
5181 return;
5182 }
5183
a9098fd0
GK
5184 /* If this is a SYMBOL_REF that refers to a constant pool entry,
5185 and we have put it in the TOC, we just need to make a TOC-relative
5186 reference to it. */
5187 if (TARGET_TOC
5188 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 5189 && constant_pool_expr_p (operands[1])
a9098fd0
GK
5190 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
5191 get_pool_mode (operands[1])))
fb4d4348 5192 {
a9098fd0 5193 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 5194 }
a9098fd0
GK
5195 else if (mode == Pmode
5196 && CONSTANT_P (operands[1])
38886f37
AO
5197 && ((GET_CODE (operands[1]) != CONST_INT
5198 && ! easy_fp_constant (operands[1], mode))
5199 || (GET_CODE (operands[1]) == CONST_INT
5200 && num_insns_constant (operands[1], mode) > 2)
5201 || (GET_CODE (operands[0]) == REG
5202 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 5203 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
5204 && ! legitimate_constant_pool_address_p (operands[1])
5205 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
5206 {
5207 /* Emit a USE operation so that the constant isn't deleted if
5208 expensive optimizations are turned on because nobody
5209 references it. This should only be done for operands that
5210 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
5211 This should not be done for operands that contain LABEL_REFs.
5212 For now, we just handle the obvious case. */
5213 if (GET_CODE (operands[1]) != LABEL_REF)
c41c1387 5214 emit_use (operands[1]);
fb4d4348 5215
c859cda6 5216#if TARGET_MACHO
ee890fe2 5217 /* Darwin uses a special PIC legitimizer. */
ab82a49f 5218 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 5219 {
ee890fe2
SS
5220 operands[1] =
5221 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
5222 operands[0]);
5223 if (operands[0] != operands[1])
5224 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
5225 return;
5226 }
c859cda6 5227#endif
ee890fe2 5228
fb4d4348
GK
5229 /* If we are to limit the number of things we put in the TOC and
5230 this is a symbol plus a constant we can add in one insn,
5231 just put the symbol in the TOC and add the constant. Don't do
5232 this if reload is in progress. */
5233 if (GET_CODE (operands[1]) == CONST
5234 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
5235 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 5236 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
5237 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
5238 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
5239 && ! side_effects_p (operands[0]))
5240 {
a4f6c312
SS
5241 rtx sym =
5242 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
5243 rtx other = XEXP (XEXP (operands[1], 0), 1);
5244
a9098fd0
GK
5245 sym = force_reg (mode, sym);
5246 if (mode == SImode)
5247 emit_insn (gen_addsi3 (operands[0], sym, other));
5248 else
5249 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
5250 return;
5251 }
5252
a9098fd0 5253 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5254
f676971a 5255 if (TARGET_TOC
0cdc04e8 5256 && GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
4d588c14 5257 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
5258 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
5259 get_pool_constant (XEXP (operands[1], 0)),
5260 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 5261 {
ba4828e0 5262 operands[1]
542a8afa 5263 = gen_const_mem (mode,
c4ad648e 5264 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 5265 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 5266 }
fb4d4348
GK
5267 }
5268 break;
a9098fd0 5269
fb4d4348 5270 case TImode:
76d2b81d
DJ
5271 rs6000_eliminate_indexed_memrefs (operands);
5272
27dc0551
DE
5273 if (TARGET_POWER)
5274 {
5275 emit_insn (gen_rtx_PARALLEL (VOIDmode,
5276 gen_rtvec (2,
5277 gen_rtx_SET (VOIDmode,
5278 operands[0], operands[1]),
5279 gen_rtx_CLOBBER (VOIDmode,
5280 gen_rtx_SCRATCH (SImode)))));
5281 return;
5282 }
fb4d4348
GK
5283 break;
5284
5285 default:
37409796 5286 gcc_unreachable ();
fb4d4348
GK
5287 }
5288
a9098fd0
GK
5289 /* Above, we may have called force_const_mem which may have returned
5290 an invalid address. If we can, fix this up; otherwise, reload will
5291 have to deal with it. */
8f4e6caf
RH
5292 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
5293 operands[1] = validize_mem (operands[1]);
a9098fd0 5294
8f4e6caf 5295 emit_set:
fb4d4348
GK
5296 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5297}
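/* The movMM expanders in rs6000.md are the usual callers of the
   function above; a typical expander body is roughly

     rs6000_emit_move (operands[0], operands[1], SImode);
     DONE;

   so that all of the special cases above are applied before any insn
   pattern is matched.  (Sketch only; see the expanders in rs6000.md
   for the exact code.)  */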
4697a36c 5298\f
2858f73a
GK
5299/* Nonzero if we can use a floating-point register to pass this arg. */
5300#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 5301 (SCALAR_FLOAT_MODE_P (MODE) \
2858f73a 5302 && (CUM)->fregno <= FP_ARG_MAX_REG \
56f4cc04 5303 && TARGET_HARD_FLOAT && TARGET_FPRS)
2858f73a
GK
5304
5305/* Nonzero if we can use an AltiVec register to pass this arg. */
5306#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
5307 (ALTIVEC_VECTOR_MODE (MODE) \
5308 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
5309 && TARGET_ALTIVEC_ABI \
83953138 5310 && (NAMED))
2858f73a 5311
c6e8c921
GK
5312/* Return a nonzero value to say to return the function value in
5313 memory, just as large structures are always returned. TYPE will be
5314 the data type of the value, and FNTYPE will be the type of the
5315 function doing the returning, or @code{NULL} for libcalls.
5316
5317 The AIX ABI for the RS/6000 specifies that all structures are
5318 returned in memory. The Darwin ABI does the same. The SVR4 ABI
5319 specifies that structures <= 8 bytes are returned in r3/r4, but a
5320 draft put them in memory, and GCC used to implement the draft
df01da37 5321 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
5322 controls this instead of DEFAULT_ABI; V.4 targets needing backward
5323 compatibility can change DRAFT_V4_STRUCT_RET to override the
5324 default, and -m switches get the final word. See
5325 rs6000_override_options for more details.
5326
5327 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
5328 long double support is enabled. These values are returned in memory.
5329
5330 int_size_in_bytes returns -1 for variable size objects, which go in
5331 memory always. The cast to unsigned makes -1 > 8. */
5332
5333static bool
586de218 5334rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 5335{
594a51fe
SS
5336 /* In the darwin64 abi, try to use registers for larger structs
5337 if possible. */
0b5383eb 5338 if (rs6000_darwin64_abi
594a51fe 5339 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
5340 && int_size_in_bytes (type) > 0)
5341 {
5342 CUMULATIVE_ARGS valcum;
5343 rtx valret;
5344
5345 valcum.words = 0;
5346 valcum.fregno = FP_ARG_MIN_REG;
5347 valcum.vregno = ALTIVEC_ARG_MIN_REG;
5348 /* Do a trial code generation as if this were going to be passed
5349 as an argument; if any part goes in memory, we return NULL. */
5350 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
5351 if (valret)
5352 return false;
5353 /* Otherwise fall through to more conventional ABI rules. */
5354 }
594a51fe 5355
c6e8c921 5356 if (AGGREGATE_TYPE_P (type)
df01da37 5357 && (aix_struct_return
c6e8c921
GK
5358 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
5359 return true;
b693336b 5360
bada2eb8
DE
5361 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5362 modes only exist for GCC vector types if -maltivec. */
5363 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
5364 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5365 return false;
5366
b693336b
PB
5367 /* Return synthetic vectors in memory. */
5368 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5369 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5370 {
5371 static bool warned_for_return_big_vectors = false;
5372 if (!warned_for_return_big_vectors)
5373 {
d4ee4d25 5374 warning (0, "GCC vector returned by reference: "
b693336b
PB
5375 "non-standard ABI extension with no compatibility guarantee");
5376 warned_for_return_big_vectors = true;
5377 }
5378 return true;
5379 }
5380
602ea4d3 5381 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 5382 return true;
ad630bef 5383
c6e8c921
GK
5384 return false;
5385}
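/* Concrete examples of the rules above: an 8-byte struct such as
   { int a, b; } comes back in r3/r4 on a V.4 target following the
   final SVR4 rules, but is returned in memory on AIX and Darwin (and
   on V.4 when the draft aix_struct_return behaviour is selected);
   anything larger than 8 bytes, or of variable size, always goes to
   memory.  */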
5386
4697a36c
MM
5387/* Initialize a variable CUM of type CUMULATIVE_ARGS
5388 for a call to a function whose data type is FNTYPE.
5389 For a library call, FNTYPE is 0.
5390
5391 For incoming args we set the number of arguments in the prototype large
1c20ae99 5392 so we never return a PARALLEL. */
4697a36c
MM
5393
5394void
f676971a 5395init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
5396 rtx libname ATTRIBUTE_UNUSED, int incoming,
5397 int libcall, int n_named_args)
4697a36c
MM
5398{
5399 static CUMULATIVE_ARGS zero_cumulative;
5400
5401 *cum = zero_cumulative;
5402 cum->words = 0;
5403 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 5404 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 5405 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
5406 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5407 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 5408 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
5409 cum->stdarg = fntype
5410 && (TYPE_ARG_TYPES (fntype) != 0
5411 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5412 != void_type_node));
4697a36c 5413
0f6937fe
AM
5414 cum->nargs_prototype = 0;
5415 if (incoming || cum->prototype)
5416 cum->nargs_prototype = n_named_args;
4697a36c 5417
a5c76ee6 5418 /* Check for a longcall attribute. */
3eb4e360
AM
5419 if ((!fntype && rs6000_default_long_calls)
5420 || (fntype
5421 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5422 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5423 cum->call_cookie |= CALL_LONG;
6a4cee5f 5424
4697a36c
MM
5425 if (TARGET_DEBUG_ARG)
5426 {
5427 fprintf (stderr, "\ninit_cumulative_args:");
5428 if (fntype)
5429 {
5430 tree ret_type = TREE_TYPE (fntype);
5431 fprintf (stderr, " ret code = %s,",
5432 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5433 }
5434
6a4cee5f
MM
5435 if (cum->call_cookie & CALL_LONG)
5436 fprintf (stderr, " longcall,");
5437
4697a36c
MM
5438 fprintf (stderr, " proto = %d, nargs = %d\n",
5439 cum->prototype, cum->nargs_prototype);
5440 }
f676971a 5441
c4ad648e
AM
5442 if (fntype
5443 && !TARGET_ALTIVEC
5444 && TARGET_ALTIVEC_ABI
5445 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5446 {
c85ce869 5447 error ("cannot return value in vector register because"
c4ad648e 5448 " altivec instructions are disabled, use -maltivec"
c85ce869 5449 " to enable them");
c4ad648e 5450 }
4697a36c
MM
5451}
5452\f
fe984136
RH
5453/* Return true if TYPE must be passed on the stack and not in registers. */
5454
5455static bool
586de218 5456rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
5457{
5458 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5459 return must_pass_in_stack_var_size (mode, type);
5460 else
5461 return must_pass_in_stack_var_size_or_pad (mode, type);
5462}
5463
c229cba9
DE
5464/* If defined, a C expression which determines whether, and in which
5465 direction, to pad out an argument with extra space. The value
5466 should be of type `enum direction': either `upward' to pad above
5467 the argument, `downward' to pad below, or `none' to inhibit
5468 padding.
5469
5470 For the AIX ABI, structs are always stored left-shifted in their
5471 argument slot. */
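/* For example, with the defaults on a big-endian target a 2-byte
   struct is padded downward, i.e. passed as if it were a 2-byte
   integer, while a 3-byte or 12-byte aggregate is padded upward and
   so left-justified in its slot, matching the AIX convention
   described above.  */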
5472
9ebbca7d 5473enum direction
586de218 5474function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 5475{
6e985040
AM
5476#ifndef AGGREGATE_PADDING_FIXED
5477#define AGGREGATE_PADDING_FIXED 0
5478#endif
5479#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5480#define AGGREGATES_PAD_UPWARD_ALWAYS 0
5481#endif
5482
5483 if (!AGGREGATE_PADDING_FIXED)
5484 {
5485 /* GCC used to pass structures of the same size as integer types as
5486 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 5487 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
5488 passed padded downward, except that -mstrict-align further
5489 muddied the water in that multi-component structures of 2 and 4
5490 bytes in size were passed padded upward.
5491
5492 The following arranges for best compatibility with previous
5493 versions of gcc, but removes the -mstrict-align dependency. */
5494 if (BYTES_BIG_ENDIAN)
5495 {
5496 HOST_WIDE_INT size = 0;
5497
5498 if (mode == BLKmode)
5499 {
5500 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5501 size = int_size_in_bytes (type);
5502 }
5503 else
5504 size = GET_MODE_SIZE (mode);
5505
5506 if (size == 1 || size == 2 || size == 4)
5507 return downward;
5508 }
5509 return upward;
5510 }
5511
5512 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5513 {
5514 if (type != 0 && AGGREGATE_TYPE_P (type))
5515 return upward;
5516 }
c229cba9 5517
d3704c46
KH
5518 /* Fall back to the default. */
5519 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
5520}
5521
b6c9286a 5522/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 5523 of an argument with the specified mode and type. If it is not defined,
b6c9286a 5524 PARM_BOUNDARY is used for all arguments.
f676971a 5525
84e9ad15
AM
5526 V.4 wants long longs and doubles to be double word aligned. Just
5527 testing the mode size is a boneheaded way to do this as it means
5528 that other types such as complex int are also double word aligned.
5529 However, we're stuck with this because changing the ABI might break
5530 existing library interfaces.
5531
b693336b
PB
5532 Doubleword align SPE vectors.
5533 Quadword align Altivec vectors.
5534 Quadword align large synthetic vector types. */
b6c9286a
MM
5535
5536int
b693336b 5537function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5538{
84e9ad15
AM
5539 if (DEFAULT_ABI == ABI_V4
5540 && (GET_MODE_SIZE (mode) == 8
5541 || (TARGET_HARD_FLOAT
5542 && TARGET_FPRS
7393f7f8 5543 && (mode == TFmode || mode == TDmode))))
4ed78545 5544 return 64;
ad630bef
DE
5545 else if (SPE_VECTOR_MODE (mode)
5546 || (type && TREE_CODE (type) == VECTOR_TYPE
5547 && int_size_in_bytes (type) >= 8
5548 && int_size_in_bytes (type) < 16))
e1f83b4d 5549 return 64;
ad630bef
DE
5550 else if (ALTIVEC_VECTOR_MODE (mode)
5551 || (type && TREE_CODE (type) == VECTOR_TYPE
5552 && int_size_in_bytes (type) >= 16))
0ac081f6 5553 return 128;
0b5383eb
DJ
5554 else if (rs6000_darwin64_abi && mode == BLKmode
5555 && type && TYPE_ALIGN (type) > 64)
5556 return 128;
9ebbca7d 5557 else
b6c9286a 5558 return PARM_BOUNDARY;
b6c9286a 5559}
c53bdcf5 5560
294bd182
AM
5561/* For a function parm of MODE and TYPE, return the starting word in
5562 the parameter area. NWORDS of the parameter area are already used. */
5563
5564static unsigned int
5565rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5566{
5567 unsigned int align;
5568 unsigned int parm_offset;
5569
5570 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5571 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5572 return nwords + (-(parm_offset + nwords) & align);
5573}
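/* Worked example of the formula above, for a 32-bit V.4 target where
   PARM_BOUNDARY is 32: parm_offset is 2, and a long long argument has
   a 64-bit boundary, so ALIGN is 64/32 - 1 = 1.  With NWORDS == 1
   already used, the result is 1 + (-(2 + 1) & 1) = 2, i.e. one word
   is skipped so that the argument starts on a doubleword boundary in
   the parameter save area.  */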
5574
c53bdcf5
AM
5575/* Compute the size (in words) of a function argument. */
5576
5577static unsigned long
5578rs6000_arg_size (enum machine_mode mode, tree type)
5579{
5580 unsigned long size;
5581
5582 if (mode != BLKmode)
5583 size = GET_MODE_SIZE (mode);
5584 else
5585 size = int_size_in_bytes (type);
5586
5587 if (TARGET_32BIT)
5588 return (size + 3) >> 2;
5589 else
5590 return (size + 7) >> 3;
5591}
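/* For instance, a 10-byte BLKmode argument occupies (10 + 3) >> 2 = 3
   words under TARGET_32BIT and (10 + 7) >> 3 = 2 doublewords
   otherwise.  */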
b6c9286a 5592\f
0b5383eb 5593/* Use this to flush pending int fields. */
594a51fe
SS
5594
5595static void
0b5383eb
DJ
5596rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5597 HOST_WIDE_INT bitpos)
594a51fe 5598{
0b5383eb
DJ
5599 unsigned int startbit, endbit;
5600 int intregs, intoffset;
5601 enum machine_mode mode;
594a51fe 5602
0b5383eb
DJ
5603 if (cum->intoffset == -1)
5604 return;
594a51fe 5605
0b5383eb
DJ
5606 intoffset = cum->intoffset;
5607 cum->intoffset = -1;
5608
5609 if (intoffset % BITS_PER_WORD != 0)
5610 {
5611 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5612 MODE_INT, 0);
5613 if (mode == BLKmode)
594a51fe 5614 {
0b5383eb
DJ
5615 /* We couldn't find an appropriate mode, which happens,
5616 e.g., in packed structs when there are 3 bytes to load.
5617 Back intoffset back to the beginning of the word in this
5618 case. */
5619 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5620 }
594a51fe 5621 }
0b5383eb
DJ
5622
5623 startbit = intoffset & -BITS_PER_WORD;
5624 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5625 intregs = (endbit - startbit) / BITS_PER_WORD;
5626 cum->words += intregs;
5627}
5628
5629/* The darwin64 ABI calls for us to recurse down through structs,
5630 looking for elements passed in registers. Unfortunately, we have
5631 to track int register count here also because of misalignments
5632 in powerpc alignment mode. */
5633
5634static void
5635rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5636 tree type,
5637 HOST_WIDE_INT startbitpos)
5638{
5639 tree f;
5640
5641 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5642 if (TREE_CODE (f) == FIELD_DECL)
5643 {
5644 HOST_WIDE_INT bitpos = startbitpos;
5645 tree ftype = TREE_TYPE (f);
70fb00df
AP
5646 enum machine_mode mode;
5647 if (ftype == error_mark_node)
5648 continue;
5649 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5650
5651 if (DECL_SIZE (f) != 0
5652 && host_integerp (bit_position (f), 1))
5653 bitpos += int_bit_position (f);
5654
5655 /* ??? FIXME: else assume zero offset. */
5656
5657 if (TREE_CODE (ftype) == RECORD_TYPE)
5658 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5659 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5660 {
5661 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5662 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5663 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5664 }
5665 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5666 {
5667 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5668 cum->vregno++;
5669 cum->words += 2;
5670 }
5671 else if (cum->intoffset == -1)
5672 cum->intoffset = bitpos;
5673 }
594a51fe
SS
5674}
5675
4697a36c
MM
5676/* Update the data in CUM to advance over an argument
5677 of mode MODE and data type TYPE.
b2d04ecf
AM
5678 (TYPE is null for libcalls where that information may not be available.)
5679
5680 Note that for args passed by reference, function_arg will be called
5681 with MODE and TYPE set to that of the pointer to the arg, not the arg
5682 itself. */
4697a36c
MM
5683
5684void
f676971a 5685function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5686 tree type, int named, int depth)
4697a36c 5687{
0b5383eb
DJ
5688 int size;
5689
594a51fe
SS
5690 /* Only tick off an argument if we're not recursing. */
5691 if (depth == 0)
5692 cum->nargs_prototype--;
4697a36c 5693
ad630bef
DE
5694 if (TARGET_ALTIVEC_ABI
5695 && (ALTIVEC_VECTOR_MODE (mode)
5696 || (type && TREE_CODE (type) == VECTOR_TYPE
5697 && int_size_in_bytes (type) == 16)))
0ac081f6 5698 {
4ed78545
AM
5699 bool stack = false;
5700
2858f73a 5701 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5702 {
6d0ef01e
HP
5703 cum->vregno++;
5704 if (!TARGET_ALTIVEC)
c85ce869 5705 error ("cannot pass argument in vector register because"
6d0ef01e 5706 " altivec instructions are disabled, use -maltivec"
c85ce869 5707 " to enable them");
4ed78545
AM
5708
5709 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5710 even if it is going to be passed in a vector register.
4ed78545
AM
5711 Darwin does the same for variable-argument functions. */
5712 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5713 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5714 stack = true;
6d0ef01e 5715 }
4ed78545
AM
5716 else
5717 stack = true;
5718
5719 if (stack)
c4ad648e 5720 {
a594a19c 5721 int align;
f676971a 5722
2858f73a
GK
5723 /* Vector parameters must be 16-byte aligned. This places
5724 them at 2 mod 4 in terms of words in 32-bit mode, since
5725 the parameter save area starts at offset 24 from the
5726 stack. In 64-bit mode, they just have to start on an
5727 even word, since the parameter save area is 16-byte
5728 aligned. Space for GPRs is reserved even if the argument
5729 will be passed in memory. */
5730 if (TARGET_32BIT)
4ed78545 5731 align = (2 - cum->words) & 3;
2858f73a
GK
5732 else
5733 align = cum->words & 1;
c53bdcf5 5734 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5735
a594a19c
GK
5736 if (TARGET_DEBUG_ARG)
5737 {
f676971a 5738 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5739 cum->words, align);
5740 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5741 cum->nargs_prototype, cum->prototype,
2858f73a 5742 GET_MODE_NAME (mode));
a594a19c
GK
5743 }
5744 }
0ac081f6 5745 }
a4b0320c 5746 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5747 && !cum->stdarg
5748 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5749 cum->sysv_gregno++;
594a51fe
SS
5750
5751 else if (rs6000_darwin64_abi
5752 && mode == BLKmode
0b5383eb
DJ
5753 && TREE_CODE (type) == RECORD_TYPE
5754 && (size = int_size_in_bytes (type)) > 0)
5755 {
5756 /* Variable sized types have size == -1 and are
5757 treated as if consisting entirely of ints.
5758 Pad to 16 byte boundary if needed. */
5759 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5760 && (cum->words % 2) != 0)
5761 cum->words++;
5762 /* For varargs, we can just go up by the size of the struct. */
5763 if (!named)
5764 cum->words += (size + 7) / 8;
5765 else
5766 {
5767 /* It is tempting to say int register count just goes up by
5768 sizeof(type)/8, but this is wrong in a case such as
5769 { int; double; int; } [powerpc alignment]. We have to
5770 grovel through the fields for these too. */
5771 cum->intoffset = 0;
5772 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5773 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5774 size * BITS_PER_UNIT);
5775 }
5776 }
f607bc57 5777 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5778 {
a3170dc6 5779 if (TARGET_HARD_FLOAT && TARGET_FPRS
56f4cc04
DE
5780 && ((TARGET_SINGLE_FLOAT && mode == SFmode)
5781 || (TARGET_DOUBLE_FLOAT && mode == DFmode)
5782 || (mode == TFmode && !TARGET_IEEEQUAD)
5783 || mode == SDmode || mode == DDmode || mode == TDmode))
4697a36c 5784 {
2d83f070
JJ
5785 /* _Decimal128 must use an even/odd register pair. This assumes
5786 that the register number is odd when fregno is odd. */
5787 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5788 cum->fregno++;
5789
5790 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5791 <= FP_ARG_V4_MAX_REG)
602ea4d3 5792 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5793 else
5794 {
602ea4d3 5795 cum->fregno = FP_ARG_V4_MAX_REG + 1;
4d4447b5
PB
5796 if (mode == DFmode || mode == TFmode
5797 || mode == DDmode || mode == TDmode)
c4ad648e 5798 cum->words += cum->words & 1;
c53bdcf5 5799 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5800 }
4697a36c 5801 }
4cc833b7
RH
5802 else
5803 {
b2d04ecf 5804 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5805 int gregno = cum->sysv_gregno;
5806
4ed78545
AM
5807 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5808 (r7,r8) or (r9,r10), as is any other 2-word item such
5809 as complex int, due to a historical mistake. */
5810 if (n_words == 2)
5811 gregno += (1 - gregno) & 1;
4cc833b7 5812
4ed78545 5813 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5814 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5815 {
4ed78545
AM
5816 /* Long long and SPE vectors are aligned on the stack.
5817 So are other 2 word items such as complex int due to
5818 a historical mistake. */
4cc833b7
RH
5819 if (n_words == 2)
5820 cum->words += cum->words & 1;
5821 cum->words += n_words;
5822 }
4697a36c 5823
4cc833b7
RH
5824 /* Note: continuing to accumulate gregno past when we've started
5825 spilling to the stack indicates the fact that we've started
5826 spilling to the stack to expand_builtin_saveregs. */
5827 cum->sysv_gregno = gregno + n_words;
5828 }
4697a36c 5829
4cc833b7
RH
5830 if (TARGET_DEBUG_ARG)
5831 {
5832 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5833 cum->words, cum->fregno);
5834 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5835 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5836 fprintf (stderr, "mode = %4s, named = %d\n",
5837 GET_MODE_NAME (mode), named);
5838 }
4697a36c
MM
5839 }
5840 else
4cc833b7 5841 {
b2d04ecf 5842 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5843 int start_words = cum->words;
5844 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5845
294bd182 5846 cum->words = align_words + n_words;
4697a36c 5847
ebb109ad 5848 if (SCALAR_FLOAT_MODE_P (mode)
56f4cc04 5849 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5850 {
5851 /* _Decimal128 must be passed in an even/odd float register pair.
5852 This assumes that the register number is odd when fregno is
5853 odd. */
5854 if (mode == TDmode && (cum->fregno % 2) == 1)
5855 cum->fregno++;
5856 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5857 }
4cc833b7
RH
5858
5859 if (TARGET_DEBUG_ARG)
5860 {
5861 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5862 cum->words, cum->fregno);
5863 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5864 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5865 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5866 named, align_words - start_words, depth);
4cc833b7
RH
5867 }
5868 }
4697a36c 5869}
a6c9bed4 5870
f82f556d
AH
5871static rtx
5872spe_build_register_parallel (enum machine_mode mode, int gregno)
5873{
17caeff2 5874 rtx r1, r3, r5, r7;
f82f556d 5875
37409796 5876 switch (mode)
f82f556d 5877 {
37409796 5878 case DFmode:
54b695e7
AH
5879 r1 = gen_rtx_REG (DImode, gregno);
5880 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5881 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5882
5883 case DCmode:
17caeff2 5884 case TFmode:
54b695e7
AH
5885 r1 = gen_rtx_REG (DImode, gregno);
5886 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5887 r3 = gen_rtx_REG (DImode, gregno + 2);
5888 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5889 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5890
17caeff2
JM
5891 case TCmode:
5892 r1 = gen_rtx_REG (DImode, gregno);
5893 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5894 r3 = gen_rtx_REG (DImode, gregno + 2);
5895 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5896 r5 = gen_rtx_REG (DImode, gregno + 4);
5897 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5898 r7 = gen_rtx_REG (DImode, gregno + 6);
5899 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5900 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5901
37409796
NS
5902 default:
5903 gcc_unreachable ();
f82f556d 5904 }
f82f556d 5905}
b78d48dd 5906
f82f556d 5907/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5908static rtx
f676971a 5909rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5910 tree type)
a6c9bed4 5911{
f82f556d
AH
5912 int gregno = cum->sysv_gregno;
5913
5914 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5915 are passed and returned in a pair of GPRs for ABI compatibility. */
4d4447b5 5916 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 5917 || mode == DCmode || mode == TCmode))
f82f556d 5918 {
b5870bee
AH
5919 int n_words = rs6000_arg_size (mode, type);
5920
f82f556d 5921 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4f011e1e 5922 if (mode == DFmode)
b5870bee 5923 gregno += (1 - gregno) & 1;
f82f556d 5924
b5870bee
AH
5925 /* Multi-reg args are not split between registers and stack. */
5926 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5927 return NULL_RTX;
5928
5929 return spe_build_register_parallel (mode, gregno);
5930 }
a6c9bed4
AH
5931 if (cum->stdarg)
5932 {
c53bdcf5 5933 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5934
5935 /* SPE vectors are put in odd registers. */
5936 if (n_words == 2 && (gregno & 1) == 0)
5937 gregno += 1;
5938
5939 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5940 {
5941 rtx r1, r2;
5942 enum machine_mode m = SImode;
5943
5944 r1 = gen_rtx_REG (m, gregno);
5945 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5946 r2 = gen_rtx_REG (m, gregno + 1);
5947 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5948 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5949 }
5950 else
b78d48dd 5951 return NULL_RTX;
a6c9bed4
AH
5952 }
5953 else
5954 {
f82f556d
AH
5955 if (gregno <= GP_ARG_MAX_REG)
5956 return gen_rtx_REG (mode, gregno);
a6c9bed4 5957 else
b78d48dd 5958 return NULL_RTX;
a6c9bed4
AH
5959 }
5960}
5961
0b5383eb
DJ
5962/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5963 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5964
0b5383eb 5965static void
bb8df8a6 5966rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5967 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5968{
0b5383eb
DJ
5969 enum machine_mode mode;
5970 unsigned int regno;
5971 unsigned int startbit, endbit;
5972 int this_regno, intregs, intoffset;
5973 rtx reg;
594a51fe 5974
0b5383eb
DJ
5975 if (cum->intoffset == -1)
5976 return;
5977
5978 intoffset = cum->intoffset;
5979 cum->intoffset = -1;
5980
5981 /* If this is the trailing part of a word, try to only load that
5982 much into the register. Otherwise load the whole register. Note
5983 that in the latter case we may pick up unwanted bits. It's not a
5984 problem at the moment, but we may wish to revisit this. */
5985
5986 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5987 {
0b5383eb
DJ
5988 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5989 MODE_INT, 0);
5990 if (mode == BLKmode)
5991 {
5992 /* We couldn't find an appropriate mode, which happens,
5993 e.g., in packed structs when there are 3 bytes to load.
5994 Back intoffset back to the beginning of the word in this
5995 case. */
5996 intoffset = intoffset & -BITS_PER_WORD;
5997 mode = word_mode;
5998 }
5999 }
6000 else
6001 mode = word_mode;
6002
6003 startbit = intoffset & -BITS_PER_WORD;
6004 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
6005 intregs = (endbit - startbit) / BITS_PER_WORD;
6006 this_regno = cum->words + intoffset / BITS_PER_WORD;
6007
6008 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
6009 cum->use_stack = 1;
bb8df8a6 6010
0b5383eb
DJ
6011 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
6012 if (intregs <= 0)
6013 return;
6014
6015 intoffset /= BITS_PER_UNIT;
6016 do
6017 {
6018 regno = GP_ARG_MIN_REG + this_regno;
6019 reg = gen_rtx_REG (mode, regno);
6020 rvec[(*k)++] =
6021 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
6022
6023 this_regno += 1;
6024 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
6025 mode = word_mode;
6026 intregs -= 1;
6027 }
6028 while (intregs > 0);
6029}
6030
6031/* Recursive workhorse for the following. */
6032
6033static void
586de218 6034rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
6035 HOST_WIDE_INT startbitpos, rtx rvec[],
6036 int *k)
6037{
6038 tree f;
6039
6040 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
6041 if (TREE_CODE (f) == FIELD_DECL)
6042 {
6043 HOST_WIDE_INT bitpos = startbitpos;
6044 tree ftype = TREE_TYPE (f);
70fb00df
AP
6045 enum machine_mode mode;
6046 if (ftype == error_mark_node)
6047 continue;
6048 mode = TYPE_MODE (ftype);
0b5383eb
DJ
6049
6050 if (DECL_SIZE (f) != 0
6051 && host_integerp (bit_position (f), 1))
6052 bitpos += int_bit_position (f);
6053
6054 /* ??? FIXME: else assume zero offset. */
6055
6056 if (TREE_CODE (ftype) == RECORD_TYPE)
6057 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
6058 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 6059 {
0b5383eb
DJ
6060#if 0
6061 switch (mode)
594a51fe 6062 {
0b5383eb
DJ
6063 case SCmode: mode = SFmode; break;
6064 case DCmode: mode = DFmode; break;
6065 case TCmode: mode = TFmode; break;
6066 default: break;
594a51fe 6067 }
0b5383eb
DJ
6068#endif
6069 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
6070 rvec[(*k)++]
bb8df8a6 6071 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
6072 gen_rtx_REG (mode, cum->fregno++),
6073 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 6074 if (mode == TFmode || mode == TDmode)
0b5383eb 6075 cum->fregno++;
594a51fe 6076 }
0b5383eb
DJ
6077 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
6078 {
6079 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
6080 rvec[(*k)++]
bb8df8a6
EC
6081 = gen_rtx_EXPR_LIST (VOIDmode,
6082 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
6083 GEN_INT (bitpos / BITS_PER_UNIT));
6084 }
6085 else if (cum->intoffset == -1)
6086 cum->intoffset = bitpos;
6087 }
6088}
594a51fe 6089
0b5383eb
DJ
6090/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
6091 the register(s) to be used for each field and subfield of a struct
6092 being passed by value, along with the offset of where the
6093 register's value may be found in the block. FP fields go in FP
6094 register, vector fields go in vector registers, and everything
bb8df8a6 6095 else goes in int registers, packed as in memory.
8ff40a74 6096
0b5383eb
DJ
6097 This code is also used for function return values. RETVAL indicates
6098 whether this is the case.
8ff40a74 6099
a4d05547 6100 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 6101 calling convention. */
594a51fe 6102
0b5383eb 6103static rtx
586de218 6104rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
6105 int named, bool retval)
6106{
6107 rtx rvec[FIRST_PSEUDO_REGISTER];
6108 int k = 1, kbase = 1;
6109 HOST_WIDE_INT typesize = int_size_in_bytes (type);
6110 /* This is a copy; modifications are not visible to our caller. */
6111 CUMULATIVE_ARGS copy_cum = *orig_cum;
6112 CUMULATIVE_ARGS *cum = &copy_cum;
6113
6114 /* Pad to 16 byte boundary if needed. */
6115 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
6116 && (cum->words % 2) != 0)
6117 cum->words++;
6118
6119 cum->intoffset = 0;
6120 cum->use_stack = 0;
6121 cum->named = named;
6122
6123 /* Put entries into rvec[] for individual FP and vector fields, and
6124 for the chunks of memory that go in int regs. Note we start at
6125 element 1; 0 is reserved for an indication of using memory, and
6126 may or may not be filled in below. */
6127 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
6128 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
6129
6130 /* If any part of the struct went on the stack put all of it there.
6131 This hack is because the generic code for
6132 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
6133 parts of the struct are not at the beginning. */
6134 if (cum->use_stack)
6135 {
6136 if (retval)
6137 return NULL_RTX; /* doesn't go in registers at all */
6138 kbase = 0;
6139 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6140 }
6141 if (k > 1 || cum->use_stack)
6142 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
6143 else
6144 return NULL_RTX;
6145}
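/* As a rough example of the PARALLELs built above, assuming the
   struct is the first (named) argument of a function: for
   struct { double d; int i; } the result is something like

     (parallel [(expr_list (reg:DF f1) (const_int 0))
                (expr_list (reg:DI r4) (const_int 8))])

   i.e. d travels in f1, the word containing i travels in the GPR that
   shadows bytes 8..15 of the struct's memory image, and the GPR
   shadowing the double is simply skipped.  The register numbers are
   illustrative and depend on how many argument slots are already in
   use.  */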
6146
b78d48dd
FJ
6147/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
6148
6149static rtx
ec6376ab 6150rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 6151{
ec6376ab
AM
6152 int n_units;
6153 int i, k;
6154 rtx rvec[GP_ARG_NUM_REG + 1];
6155
6156 if (align_words >= GP_ARG_NUM_REG)
6157 return NULL_RTX;
6158
6159 n_units = rs6000_arg_size (mode, type);
6160
6161 /* Optimize the simple case where the arg fits in one gpr, except in
6162 the case of BLKmode due to assign_parms assuming that registers are
6163 BITS_PER_WORD wide. */
6164 if (n_units == 0
6165 || (n_units == 1 && mode != BLKmode))
6166 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6167
6168 k = 0;
6169 if (align_words + n_units > GP_ARG_NUM_REG)
6170 /* Not all of the arg fits in gprs. Say that it goes in memory too,
6171 using a magic NULL_RTX component.
79773478
AM
6172 This is not strictly correct. Only some of the arg belongs in
6173 memory, not all of it. However, the normal scheme using
6174 function_arg_partial_nregs can result in unusual subregs, eg.
6175 (subreg:SI (reg:DF) 4), which are not handled well. The code to
6176 store the whole arg to memory is often more efficient than code
6177 to store pieces, and we know that space is available in the right
6178 place for the whole arg. */
ec6376ab
AM
6179 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6180
6181 i = 0;
6182 do
36a454e1 6183 {
ec6376ab
AM
6184 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
6185 rtx off = GEN_INT (i++ * 4);
6186 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 6187 }
ec6376ab
AM
6188 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
6189
6190 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
6191}
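/* Example of the PARALLEL built above: with seven argument words
   already in use, a DImode argument for -m32 -mpowerpc64 code is
   described as

     (parallel:DI [(expr_list (nil) (const_int 0))
                   (expr_list (reg:SI 10) (const_int 0))])

   meaning the first four bytes are passed in r10 and the remainder of
   the value lives in its stack slot (assuming the usual r3-r10
   argument registers).  */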
6192
4697a36c
MM
6193/* Determine where to put an argument to a function.
6194 Value is zero to push the argument on the stack,
6195 or a hard register in which to store the argument.
6196
6197 MODE is the argument's machine mode.
6198 TYPE is the data type of the argument (as a tree).
6199 This is null for libcalls where that information may
6200 not be available.
6201 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
6202 the preceding args and about the function being called. It is
6203 not modified in this routine.
4697a36c
MM
6204 NAMED is nonzero if this argument is a named parameter
6205 (otherwise it is an extra parameter matching an ellipsis).
6206
6207 On RS/6000 the first eight words of non-FP are normally in registers
6208 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
6209 Under V.4, the first 8 FP args are in registers.
6210
6211 If this is floating-point and no prototype is specified, we use
6212 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 6213 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 6214 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
6215 doesn't support PARALLEL anyway.
6216
6217 Note that for args passed by reference, function_arg will be called
6218 with MODE and TYPE set to that of the pointer to the arg, not the arg
6219 itself. */
4697a36c 6220
9390387d 6221rtx
f676971a 6222function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 6223 tree type, int named)
4697a36c 6224{
4cc833b7 6225 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 6226
a4f6c312
SS
6227 /* Return a marker to indicate whether CR1 needs to set or clear the
6228 bit that V.4 uses to say fp args were passed in registers.
6229 Assume that we don't need the marker for software floating point,
6230 or compiler generated library calls. */
4697a36c
MM
6231 if (mode == VOIDmode)
6232 {
f607bc57 6233 if (abi == ABI_V4
b9599e46 6234 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
6235 && (cum->stdarg
6236 || (cum->nargs_prototype < 0
6237 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 6238 {
a3170dc6
AH
6239 /* For the SPE, we need to crxor CR6 always. */
6240 if (TARGET_SPE_ABI)
6241 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
6242 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
6243 return GEN_INT (cum->call_cookie
6244 | ((cum->fregno == FP_ARG_MIN_REG)
6245 ? CALL_V4_SET_FP_ARGS
6246 : CALL_V4_CLEAR_FP_ARGS));
7509c759 6247 }
4697a36c 6248
7509c759 6249 return GEN_INT (cum->call_cookie);
4697a36c
MM
6250 }
6251
0b5383eb
DJ
6252 if (rs6000_darwin64_abi && mode == BLKmode
6253 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 6254 {
0b5383eb 6255 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
6256 if (rslt != NULL_RTX)
6257 return rslt;
6258 /* Else fall through to usual handling. */
6259 }
6260
2858f73a 6261 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
6262 if (TARGET_64BIT && ! cum->prototype)
6263 {
c4ad648e
AM
 6264 /* Vector parameters get passed in a vector register
 6265 and also in GPRs or memory, in the absence of a prototype. */
6266 int align_words;
6267 rtx slot;
6268 align_words = (cum->words + 1) & ~1;
6269
6270 if (align_words >= GP_ARG_NUM_REG)
6271 {
6272 slot = NULL_RTX;
6273 }
6274 else
6275 {
6276 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6277 }
6278 return gen_rtx_PARALLEL (mode,
6279 gen_rtvec (2,
6280 gen_rtx_EXPR_LIST (VOIDmode,
6281 slot, const0_rtx),
6282 gen_rtx_EXPR_LIST (VOIDmode,
6283 gen_rtx_REG (mode, cum->vregno),
6284 const0_rtx)));
c72d6c26
HP
6285 }
6286 else
6287 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
6288 else if (TARGET_ALTIVEC_ABI
6289 && (ALTIVEC_VECTOR_MODE (mode)
6290 || (type && TREE_CODE (type) == VECTOR_TYPE
6291 && int_size_in_bytes (type) == 16)))
0ac081f6 6292 {
2858f73a 6293 if (named || abi == ABI_V4)
a594a19c 6294 return NULL_RTX;
0ac081f6 6295 else
a594a19c
GK
6296 {
6297 /* Vector parameters to varargs functions under AIX or Darwin
6298 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
6299 int align, align_words, n_words;
6300 enum machine_mode part_mode;
a594a19c
GK
6301
6302 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
6303 2 mod 4 in terms of words in 32-bit mode, since the parameter
6304 save area starts at offset 24 from the stack. In 64-bit mode,
6305 they just have to start on an even word, since the parameter
6306 save area is 16-byte aligned. */
6307 if (TARGET_32BIT)
4ed78545 6308 align = (2 - cum->words) & 3;
2858f73a
GK
6309 else
6310 align = cum->words & 1;
a594a19c
GK
6311 align_words = cum->words + align;
6312
6313 /* Out of registers? Memory, then. */
6314 if (align_words >= GP_ARG_NUM_REG)
6315 return NULL_RTX;
ec6376ab
AM
6316
6317 if (TARGET_32BIT && TARGET_POWERPC64)
6318 return rs6000_mixed_function_arg (mode, type, align_words);
6319
2858f73a
GK
6320 /* The vector value goes in GPRs. Only the part of the
6321 value in GPRs is reported here. */
ec6376ab
AM
6322 part_mode = mode;
6323 n_words = rs6000_arg_size (mode, type);
6324 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 6325 /* Fortunately, there are only two possibilities: the value
2858f73a
GK
6326 is either wholly in GPRs or half in GPRs and half not. */
6327 part_mode = DImode;
ec6376ab
AM
6328
6329 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 6330 }
0ac081f6 6331 }
f82f556d
AH
6332 else if (TARGET_SPE_ABI && TARGET_SPE
6333 && (SPE_VECTOR_MODE (mode)
18f63bfa 6334 || (TARGET_E500_DOUBLE && (mode == DFmode
17caeff2
JM
6335 || mode == DCmode
6336 || mode == TFmode
6337 || mode == TCmode))))
a6c9bed4 6338 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 6339
f607bc57 6340 else if (abi == ABI_V4)
4697a36c 6341 {
a3170dc6 6342 if (TARGET_HARD_FLOAT && TARGET_FPRS
56f4cc04
DE
6343 && ((TARGET_SINGLE_FLOAT && mode == SFmode)
6344 || (TARGET_DOUBLE_FLOAT && mode == DFmode)
cf8e1455
DE
6345 || (mode == TFmode && !TARGET_IEEEQUAD)
6346 || mode == SDmode || mode == DDmode || mode == TDmode))
4cc833b7 6347 {
2d83f070
JJ
6348 /* _Decimal128 must use an even/odd register pair. This assumes
6349 that the register number is odd when fregno is odd. */
6350 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
6351 cum->fregno++;
6352
6353 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
6354 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
6355 return gen_rtx_REG (mode, cum->fregno);
6356 else
b78d48dd 6357 return NULL_RTX;
4cc833b7
RH
6358 }
6359 else
6360 {
b2d04ecf 6361 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
6362 int gregno = cum->sysv_gregno;
6363
4ed78545
AM
6364 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
 6365 (r7,r8) or (r9,r10), as is any other 2-word item such
 6366 as complex int, due to a historical mistake. */
6367 if (n_words == 2)
6368 gregno += (1 - gregno) & 1;
4cc833b7 6369
4ed78545 6370 /* Multi-reg args are not split between registers and stack. */
ec6376ab 6371 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 6372 return NULL_RTX;
ec6376ab
AM
6373
6374 if (TARGET_32BIT && TARGET_POWERPC64)
6375 return rs6000_mixed_function_arg (mode, type,
6376 gregno - GP_ARG_MIN_REG);
6377 return gen_rtx_REG (mode, gregno);
4cc833b7 6378 }
4697a36c 6379 }
4cc833b7
RH
6380 else
6381 {
294bd182 6382 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 6383
2d83f070
JJ
6384 /* _Decimal128 must be passed in an even/odd float register pair.
6385 This assumes that the register number is odd when fregno is odd. */
6386 if (mode == TDmode && (cum->fregno % 2) == 1)
6387 cum->fregno++;
6388
2858f73a 6389 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 6390 {
ec6376ab
AM
6391 rtx rvec[GP_ARG_NUM_REG + 1];
6392 rtx r;
6393 int k;
c53bdcf5
AM
6394 bool needs_psave;
6395 enum machine_mode fmode = mode;
c53bdcf5
AM
6396 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6397
6398 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6399 {
c53bdcf5
AM
6400 /* Currently, we only ever need one reg here because complex
6401 doubles are split. */
7393f7f8
BE
6402 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6403 && (fmode == TFmode || fmode == TDmode));
ec6376ab 6404
7393f7f8
BE
6405 /* Long double or _Decimal128 split over regs and memory. */
6406 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 6407 }
c53bdcf5
AM
6408
6409 /* Do we also need to pass this arg in the parameter save
6410 area? */
6411 needs_psave = (type
6412 && (cum->nargs_prototype <= 0
6413 || (DEFAULT_ABI == ABI_AIX
de17c25f 6414 && TARGET_XL_COMPAT
c53bdcf5
AM
6415 && align_words >= GP_ARG_NUM_REG)));
6416
6417 if (!needs_psave && mode == fmode)
ec6376ab 6418 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 6419
ec6376ab 6420 k = 0;
c53bdcf5
AM
6421 if (needs_psave)
6422 {
ec6376ab 6423 /* Describe the part that goes in gprs or the stack.
c53bdcf5 6424 This piece must come first, before the fprs. */
c53bdcf5
AM
6425 if (align_words < GP_ARG_NUM_REG)
6426 {
6427 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
6428
6429 if (align_words + n_words > GP_ARG_NUM_REG
6430 || (TARGET_32BIT && TARGET_POWERPC64))
6431 {
6432 /* If this is partially on the stack, then we only
6433 include the portion actually in registers here. */
6434 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6435 rtx off;
79773478
AM
6436 int i = 0;
6437 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
6438 /* Not all of the arg fits in gprs. Say that it
6439 goes in memory too, using a magic NULL_RTX
6440 component. Also see comment in
6441 rs6000_mixed_function_arg for why the normal
6442 function_arg_partial_nregs scheme doesn't work
6443 in this case. */
6444 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6445 const0_rtx);
ec6376ab
AM
6446 do
6447 {
6448 r = gen_rtx_REG (rmode,
6449 GP_ARG_MIN_REG + align_words);
2e6c9641 6450 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
6451 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6452 }
6453 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6454 }
6455 else
6456 {
6457 /* The whole arg fits in gprs. */
6458 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6459 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6460 }
c53bdcf5 6461 }
ec6376ab
AM
6462 else
6463 /* It's entirely in memory. */
6464 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
6465 }
6466
ec6376ab
AM
6467 /* Describe where this piece goes in the fprs. */
6468 r = gen_rtx_REG (fmode, cum->fregno);
6469 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6470
6471 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
6472 }
6473 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 6474 {
ec6376ab
AM
6475 if (TARGET_32BIT && TARGET_POWERPC64)
6476 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 6477
4eeca74f
AM
6478 if (mode == BLKmode)
6479 mode = Pmode;
6480
b2d04ecf
AM
6481 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6482 }
4cc833b7
RH
6483 else
6484 return NULL_RTX;
4697a36c 6485 }
4697a36c
MM
6486}
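/* Illustrative sketch only (not additional compiler code): for an
   unprototyped double passed as the first argument on 64-bit AIX with
   hard float, needs_psave is set above and the PARALLEL that is
   returned describes the value in both a GP and an FP register, as
   promised by the comment before this function:

     rvec[0] = gen_rtx_EXPR_LIST (VOIDmode,
                                  gen_rtx_REG (DFmode, GP_ARG_MIN_REG),
                                  const0_rtx);
     rvec[1] = gen_rtx_EXPR_LIST (VOIDmode,
                                  gen_rtx_REG (DFmode, cum->fregno),
                                  const0_rtx);
     return gen_rtx_PARALLEL (DFmode, gen_rtvec_v (2, rvec));

   so the caller loads the argument into both r3 and f1.  */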
6487\f
ec6376ab 6488/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
6489 the number of bytes passed in registers. For args passed entirely in
6490 registers or entirely in memory, zero. When an arg is described by a
6491 PARALLEL, perhaps using more than one register type, this function
6492 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 6493
78a52f11
RH
6494static int
6495rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6496 tree type, bool named)
4697a36c 6497{
c53bdcf5 6498 int ret = 0;
ec6376ab 6499 int align_words;
c53bdcf5 6500
f607bc57 6501 if (DEFAULT_ABI == ABI_V4)
4697a36c 6502 return 0;
4697a36c 6503
c53bdcf5
AM
6504 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6505 && cum->nargs_prototype >= 0)
6506 return 0;
6507
0b5383eb
DJ
6508 /* In this complicated case we just disable the partial_nregs code. */
6509 if (rs6000_darwin64_abi && mode == BLKmode
6510 && TREE_CODE (type) == RECORD_TYPE
6511 && int_size_in_bytes (type) > 0)
6512 return 0;
6513
294bd182 6514 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6515
79773478
AM
6516 if (USE_FP_FOR_ARG_P (cum, mode, type))
6517 {
fb63c729
AM
6518 /* If we are passing this arg in the fixed parameter save area
6519 (gprs or memory) as well as fprs, then this function should
79773478
AM
6520 return the number of partial bytes passed in the parameter
6521 save area rather than partial bytes passed in fprs. */
6522 if (type
6523 && (cum->nargs_prototype <= 0
6524 || (DEFAULT_ABI == ABI_AIX
6525 && TARGET_XL_COMPAT
6526 && align_words >= GP_ARG_NUM_REG)))
6527 return 0;
6528 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6529 > FP_ARG_MAX_REG + 1)
ac7e839c 6530 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6531 else if (cum->nargs_prototype >= 0)
4697a36c
MM
6532 return 0;
6533 }
6534
ec6376ab
AM
6535 if (align_words < GP_ARG_NUM_REG
6536 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6537 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6538
c53bdcf5 6539 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6540 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6541
c53bdcf5 6542 return ret;
4697a36c
MM
6543}
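/* Worked example (illustrative): on 64-bit AIX a 24-byte struct whose
   first word lands in r9 has align_words == 6 and an argument size of
   3 doublewords, so it straddles the register boundary and the code
   above reports (GP_ARG_NUM_REG - align_words) * 8 == (8 - 6) * 8
   == 16 bytes passed in r9/r10; the remaining 8 bytes live in the
   parameter save area.  */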
6544\f
6545/* A C expression that indicates when an argument must be passed by
6546 reference. If nonzero for an argument, a copy of that argument is
6547 made in memory and a pointer to the argument is passed instead of
6548 the argument itself. The pointer is passed in whatever way is
6549 appropriate for passing a pointer to that type.
6550
b2d04ecf
AM
6551 Under V.4, aggregates and long double are passed by reference.
6552
6553 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6554 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
6555
6556 As an extension to all ABIs, variable sized types are passed by
6557 reference. */
4697a36c 6558
8cd5a4e0 6559static bool
f676971a 6560rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6561 enum machine_mode mode, const_tree type,
bada2eb8 6562 bool named ATTRIBUTE_UNUSED)
4697a36c 6563{
602ea4d3 6564 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
6565 {
6566 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6567 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6568 return 1;
6569 }
6570
6571 if (!type)
6572 return 0;
4697a36c 6573
bada2eb8
DE
6574 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6575 {
6576 if (TARGET_DEBUG_ARG)
6577 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6578 return 1;
6579 }
6580
6581 if (int_size_in_bytes (type) < 0)
6582 {
6583 if (TARGET_DEBUG_ARG)
6584 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6585 return 1;
6586 }
6587
6588 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6589 modes only exist for GCC vector types if -maltivec. */
6590 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6591 {
6592 if (TARGET_DEBUG_ARG)
6593 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6594 return 1;
6595 }
b693336b
PB
6596
6597 /* Pass synthetic vectors in memory. */
bada2eb8 6598 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6599 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6600 {
6601 static bool warned_for_pass_big_vectors = false;
6602 if (TARGET_DEBUG_ARG)
6603 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6604 if (!warned_for_pass_big_vectors)
6605 {
d4ee4d25 6606 warning (0, "GCC vector passed by reference: "
b693336b
PB
6607 "non-standard ABI extension with no compatibility guarantee");
6608 warned_for_pass_big_vectors = true;
6609 }
6610 return 1;
6611 }
6612
b2d04ecf 6613 return 0;
4697a36c 6614}
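/* Illustrative examples (not compiled): given the tests above, these
   parameters are passed by reference under the 32-bit SVR4 ABI:

     struct s { int a, b, c; };
     void f (struct s x);        - any aggregate under V.4
     void g (long double x);     - TFmode when long double is IEEE quad
                                   (TARGET_IEEEQUAD) under V.4

   and, under every ABI, a synthetic vector wider than the limit noted
   above, e.g.

     typedef int v8si __attribute__ ((vector_size (32)));
     void h (v8si x);            - 32 bytes > 16 (or 8 without the
                                   AltiVec ABI), so passed by reference
                                   with the one-time warning.  */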
5985c7a6
FJ
6615
6616static void
2d9db8eb 6617rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6618{
6619 int i;
6620 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6621
6622 if (nregs == 0)
6623 return;
6624
c4ad648e 6625 for (i = 0; i < nregs; i++)
5985c7a6 6626 {
9390387d 6627 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6628 if (reload_completed)
c4ad648e
AM
6629 {
6630 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6631 tem = NULL_RTX;
6632 else
6633 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6634 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6635 }
5985c7a6
FJ
6636 else
6637 tem = replace_equiv_address (tem, XEXP (tem, 0));
6638
37409796 6639 gcc_assert (tem);
5985c7a6
FJ
6640
6641 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6642 }
6643}
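/* For example, rs6000_move_block_from_reg (GP_ARG_MIN_REG + 3, mem, 5)
   stores r6 through r10 into MEM at byte offsets 0, 4, ... 16 for
   -m32 (0, 8, ... 32 for -m64).  Illustrative only.  */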
4697a36c
MM
6644\f
 6645/* Perform any actions needed for a function that is receiving a
f676971a 6646 variable number of arguments.
4697a36c
MM
6647
6648 CUM is as above.
6649
6650 MODE and TYPE are the mode and type of the current parameter.
6651
6652 PRETEND_SIZE is a variable that should be set to the amount of stack
6653 that must be pushed by the prolog to pretend that our caller pushed
6654 it.
6655
6656 Normally, this macro will push all remaining incoming registers on the
6657 stack and set PRETEND_SIZE to the length of the registers pushed. */
6658
c6e8c921 6659static void
f676971a 6660setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
6661 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6662 int no_rtl)
4697a36c 6663{
4cc833b7
RH
6664 CUMULATIVE_ARGS next_cum;
6665 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6666 rtx save_area = NULL_RTX, mem;
4862826d
ILT
6667 int first_reg_offset;
6668 alias_set_type set;
4697a36c 6669
f31bf321 6670 /* Skip the last named argument. */
d34c5b80 6671 next_cum = *cum;
594a51fe 6672 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6673
f607bc57 6674 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6675 {
5b667039
JJ
6676 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6677
60e2d0ca 6678 if (! no_rtl)
5b667039
JJ
6679 {
6680 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6681 HOST_WIDE_INT offset = 0;
6682
6683 /* Try to optimize the size of the varargs save area.
6684 The ABI requires that ap.reg_save_area is doubleword
6685 aligned, but we don't need to allocate space for all
6686 the bytes, only those to which we actually will save
6687 anything. */
6688 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6689 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6690 if (TARGET_HARD_FLOAT && TARGET_FPRS
6691 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6692 && cfun->va_list_fpr_size)
6693 {
6694 if (gpr_reg_num)
6695 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6696 * UNITS_PER_FP_WORD;
6697 if (cfun->va_list_fpr_size
6698 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6699 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6700 else
6701 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6702 * UNITS_PER_FP_WORD;
6703 }
6704 if (gpr_reg_num)
6705 {
6706 offset = -((first_reg_offset * reg_size) & ~7);
6707 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6708 {
6709 gpr_reg_num = cfun->va_list_gpr_size;
6710 if (reg_size == 4 && (first_reg_offset & 1))
6711 gpr_reg_num++;
6712 }
6713 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6714 }
6715 else if (fpr_size)
6716 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6717 * UNITS_PER_FP_WORD
6718 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6719
5b667039
JJ
6720 if (gpr_size + fpr_size)
6721 {
6722 rtx reg_save_area
6723 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6724 gcc_assert (GET_CODE (reg_save_area) == MEM);
6725 reg_save_area = XEXP (reg_save_area, 0);
6726 if (GET_CODE (reg_save_area) == PLUS)
6727 {
6728 gcc_assert (XEXP (reg_save_area, 0)
6729 == virtual_stack_vars_rtx);
6730 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6731 offset += INTVAL (XEXP (reg_save_area, 1));
6732 }
6733 else
6734 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6735 }
6736
6737 cfun->machine->varargs_save_offset = offset;
6738 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6739 }
4697a36c 6740 }
60e2d0ca 6741 else
4697a36c 6742 {
d34c5b80 6743 first_reg_offset = next_cum.words;
4cc833b7 6744 save_area = virtual_incoming_args_rtx;
4697a36c 6745
fe984136 6746 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6747 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6748 }
4697a36c 6749
dfafc897 6750 set = get_varargs_alias_set ();
9d30f3c1
JJ
6751 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6752 && cfun->va_list_gpr_size)
4cc833b7 6753 {
9d30f3c1
JJ
6754 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6755
6756 if (va_list_gpr_counter_field)
6757 {
6758 /* V4 va_list_gpr_size counts number of registers needed. */
6759 if (nregs > cfun->va_list_gpr_size)
6760 nregs = cfun->va_list_gpr_size;
6761 }
6762 else
6763 {
6764 /* char * va_list instead counts number of bytes needed. */
6765 if (nregs > cfun->va_list_gpr_size / reg_size)
6766 nregs = cfun->va_list_gpr_size / reg_size;
6767 }
6768
dfafc897 6769 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6770 plus_constant (save_area,
13e2e16e
DE
6771 first_reg_offset * reg_size));
6772 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6773 set_mem_alias_set (mem, set);
8ac61af7 6774 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6775
f676971a 6776 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6777 nregs);
4697a36c
MM
6778 }
6779
4697a36c 6780 /* Save FP registers if needed. */
f607bc57 6781 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6782 && TARGET_HARD_FLOAT && TARGET_FPRS
6783 && ! no_rtl
9d30f3c1
JJ
6784 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6785 && cfun->va_list_fpr_size)
4697a36c 6786 {
9d30f3c1 6787 int fregno = next_cum.fregno, nregs;
9ebbca7d 6788 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6789 rtx lab = gen_label_rtx ();
5b667039
JJ
6790 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6791 * UNITS_PER_FP_WORD);
4697a36c 6792
c4ad648e
AM
6793 emit_jump_insn
6794 (gen_rtx_SET (VOIDmode,
6795 pc_rtx,
6796 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6797 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6798 const0_rtx),
39403d82 6799 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6800 pc_rtx)));
6801
9d30f3c1
JJ
6802 for (nregs = 0;
6803 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6804 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6805 {
696e45ba
ME
6806 mem = gen_rtx_MEM ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6807 ? DFmode : SFmode,
6808 plus_constant (save_area, off));
6809 MEM_NOTRAP_P (mem) = 1;
6810 set_mem_alias_set (mem, set);
6811 set_mem_align (mem, GET_MODE_ALIGNMENT (
6812 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6813 ? DFmode : SFmode));
6814 emit_move_insn (mem, gen_rtx_REG (
6815 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6816 ? DFmode : SFmode, fregno));
4697a36c 6817 }
4cc833b7
RH
6818
6819 emit_label (lab);
4697a36c 6820 }
4697a36c 6821}
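/* Rough sketch of the register save area set up above for 32-bit V.4
   (only the slots that va_arg may still read are actually allocated):

     reg_save_area +  0 : dumped GPRs r3 .. r10, one 4-byte word each
     reg_save_area + 32 : dumped FPRs f1 .. f8, one doubleword each

   cfun->machine->varargs_save_offset records where this block sits
   relative to virtual_stack_vars_rtx, and rs6000_va_start below stores
   the same address into the user-visible va_list.  */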
4697a36c 6822
dfafc897 6823/* Create the va_list data type. */
2c4974b7 6824
c35d187f
RH
6825static tree
6826rs6000_build_builtin_va_list (void)
dfafc897 6827{
64c2816f 6828 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6829
9ebbca7d
GK
6830 /* For AIX, prefer 'char *' because that's what the system
6831 header files like. */
f607bc57 6832 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6833 return build_pointer_type (char_type_node);
dfafc897 6834
f1e639b1 6835 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6836 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6837
f676971a 6838 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6839 unsigned_char_type_node);
f676971a 6840 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6841 unsigned_char_type_node);
64c2816f
DT
6842 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6843 every user file. */
6844 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6845 short_unsigned_type_node);
dfafc897
FS
6846 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6847 ptr_type_node);
6848 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6849 ptr_type_node);
6850
9d30f3c1
JJ
6851 va_list_gpr_counter_field = f_gpr;
6852 va_list_fpr_counter_field = f_fpr;
6853
dfafc897
FS
6854 DECL_FIELD_CONTEXT (f_gpr) = record;
6855 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6856 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6857 DECL_FIELD_CONTEXT (f_ovf) = record;
6858 DECL_FIELD_CONTEXT (f_sav) = record;
6859
bab45a51
FS
6860 TREE_CHAIN (record) = type_decl;
6861 TYPE_NAME (record) = type_decl;
dfafc897
FS
6862 TYPE_FIELDS (record) = f_gpr;
6863 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6864 TREE_CHAIN (f_fpr) = f_res;
6865 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6866 TREE_CHAIN (f_ovf) = f_sav;
6867
6868 layout_type (record);
6869
6870 /* The correct type is an array type of one element. */
6871 return build_array_type (record, build_index_type (size_zero_node));
6872}
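/* Illustrative C equivalent of the record built above for V.4 (an
   approximation drawn from the fields, not something the compiler
   emits as source):

     typedef struct __va_list_tag
     {
       unsigned char gpr;            - GP arg registers used so far, 0..8
       unsigned char fpr;            - FP arg registers used so far, 0..8
       unsigned short reserved;      - the named padding
       void *overflow_arg_area;      - arguments passed on the stack
       void *reg_save_area;          - dumped r3-r10 / f1-f8
     } __gnuc_va_list[1];

   (the typedef name is illustrative).  The AIX and Darwin ABIs instead
   use the plain 'char *' returned at the top of this function.  */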
6873
6874/* Implement va_start. */
6875
d7bd8aeb 6876static void
a2369ed3 6877rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6878{
dfafc897 6879 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6880 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6881 tree gpr, fpr, ovf, sav, t;
2c4974b7 6882
dfafc897 6883 /* Only SVR4 needs something special. */
f607bc57 6884 if (DEFAULT_ABI != ABI_V4)
dfafc897 6885 {
e5faf155 6886 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6887 return;
6888 }
6889
973a648b 6890 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6891 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6892 f_res = TREE_CHAIN (f_fpr);
6893 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6894 f_sav = TREE_CHAIN (f_ovf);
6895
872a65b5 6896 valist = build_va_arg_indirect_ref (valist);
47a25a46 6897 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
726a989a
RB
6898 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
6899 f_fpr, NULL_TREE);
6900 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
6901 f_ovf, NULL_TREE);
6902 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
6903 f_sav, NULL_TREE);
dfafc897
FS
6904
6905 /* Count number of gp and fp argument registers used. */
38173d38
JH
6906 words = crtl->args.info.words;
6907 n_gpr = MIN (crtl->args.info.sysv_gregno - GP_ARG_MIN_REG,
987732e0 6908 GP_ARG_NUM_REG);
38173d38 6909 n_fpr = MIN (crtl->args.info.fregno - FP_ARG_MIN_REG,
987732e0 6910 FP_ARG_NUM_REG);
dfafc897
FS
6911
6912 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6913 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6914 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6915 words, n_gpr, n_fpr);
dfafc897 6916
9d30f3c1
JJ
6917 if (cfun->va_list_gpr_size)
6918 {
726a989a 6919 t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
47a25a46 6920 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6921 TREE_SIDE_EFFECTS (t) = 1;
6922 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6923 }
58c8adc1 6924
9d30f3c1
JJ
6925 if (cfun->va_list_fpr_size)
6926 {
726a989a 6927 t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
47a25a46 6928 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6929 TREE_SIDE_EFFECTS (t) = 1;
6930 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6931 }
dfafc897
FS
6932
6933 /* Find the overflow area. */
6934 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6935 if (words != 0)
5be014d5
AP
6936 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6937 size_int (words * UNITS_PER_WORD));
726a989a 6938 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6939 TREE_SIDE_EFFECTS (t) = 1;
6940 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6941
9d30f3c1
JJ
6942 /* If there were no va_arg invocations, don't set up the register
6943 save area. */
6944 if (!cfun->va_list_gpr_size
6945 && !cfun->va_list_fpr_size
6946 && n_gpr < GP_ARG_NUM_REG
6947 && n_fpr < FP_ARG_V4_MAX_REG)
6948 return;
6949
dfafc897
FS
6950 /* Find the register save area. */
6951 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6952 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6953 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6954 size_int (cfun->machine->varargs_save_offset));
726a989a 6955 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
dfafc897
FS
6956 TREE_SIDE_EFFECTS (t) = 1;
6957 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6958}
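/* Example (illustrative): for 'int f (const char *fmt, ...)' compiled
   for 32-bit V.4, the single named argument uses r3, so the code above
   stores gpr = 1 and fpr = 0 into the va_list; the first integer-typed
   va_arg then reads the word at reg_save_area + 4, which is where r4
   was dumped by setup_incoming_varargs.  */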
6959
6960/* Implement va_arg. */
6961
23a60a04 6962tree
726a989a
RB
6963rs6000_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
6964 gimple_seq *post_p)
cd3ce9b4 6965{
cd3ce9b4
JM
6966 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6967 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6968 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6969 tree lab_false, lab_over, addr;
6970 int align;
6971 tree ptrtype = build_pointer_type (type);
7393f7f8 6972 int regalign = 0;
726a989a 6973 gimple stmt;
cd3ce9b4 6974
08b0dc1b
RH
6975 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6976 {
6977 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6978 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6979 }
6980
cd3ce9b4
JM
6981 if (DEFAULT_ABI != ABI_V4)
6982 {
08b0dc1b 6983 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6984 {
6985 tree elem_type = TREE_TYPE (type);
6986 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6987 int elem_size = GET_MODE_SIZE (elem_mode);
6988
6989 if (elem_size < UNITS_PER_WORD)
6990 {
23a60a04 6991 tree real_part, imag_part;
726a989a 6992 gimple_seq post = NULL;
cd3ce9b4 6993
23a60a04
JM
6994 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6995 &post);
6996 /* Copy the value into a temporary, lest the formal temporary
6997 be reused out from under us. */
6998 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
726a989a 6999 gimple_seq_add_seq (pre_p, post);
cd3ce9b4 7000
23a60a04
JM
7001 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
7002 post_p);
cd3ce9b4 7003
47a25a46 7004 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
7005 }
7006 }
7007
23a60a04 7008 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
7009 }
7010
7011 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7012 f_fpr = TREE_CHAIN (f_gpr);
7013 f_res = TREE_CHAIN (f_fpr);
7014 f_ovf = TREE_CHAIN (f_res);
7015 f_sav = TREE_CHAIN (f_ovf);
7016
872a65b5 7017 valist = build_va_arg_indirect_ref (valist);
47a25a46 7018 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
726a989a
RB
7019 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
7020 f_fpr, NULL_TREE);
7021 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
7022 f_ovf, NULL_TREE);
7023 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
7024 f_sav, NULL_TREE);
cd3ce9b4
JM
7025
7026 size = int_size_in_bytes (type);
7027 rsize = (size + 3) / 4;
7028 align = 1;
7029
08b0dc1b 7030 if (TARGET_HARD_FLOAT && TARGET_FPRS
696e45ba
ME
7031 && ((TARGET_SINGLE_FLOAT && TYPE_MODE (type) == SFmode)
7032 || (TARGET_DOUBLE_FLOAT
7033 && (TYPE_MODE (type) == DFmode
7034 || TYPE_MODE (type) == TFmode
7035 || TYPE_MODE (type) == SDmode
7036 || TYPE_MODE (type) == DDmode
7037 || TYPE_MODE (type) == TDmode))))
cd3ce9b4
JM
7038 {
7039 /* FP args go in FP registers, if present. */
cd3ce9b4 7040 reg = fpr;
602ea4d3 7041 n_reg = (size + 7) / 8;
696e45ba
ME
7042 sav_ofs = ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? 8 : 4) * 4;
7043 sav_scale = ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? 8 : 4);
e41b2a33 7044 if (TYPE_MODE (type) != SFmode && TYPE_MODE (type) != SDmode)
cd3ce9b4
JM
7045 align = 8;
7046 }
7047 else
7048 {
7049 /* Otherwise into GP registers. */
cd3ce9b4
JM
7050 reg = gpr;
7051 n_reg = rsize;
7052 sav_ofs = 0;
7053 sav_scale = 4;
7054 if (n_reg == 2)
7055 align = 8;
7056 }
7057
7058 /* Pull the value out of the saved registers.... */
7059
7060 lab_over = NULL;
7061 addr = create_tmp_var (ptr_type_node, "addr");
7062 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
7063
7064 /* AltiVec vectors never go in registers when -mabi=altivec. */
7065 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
7066 align = 16;
7067 else
7068 {
7069 lab_false = create_artificial_label ();
7070 lab_over = create_artificial_label ();
7071
7072 /* Long long and SPE vectors are aligned in the registers.
 7073 As is any other 2-gpr item such as complex int, due to a
7074 historical mistake. */
7075 u = reg;
602ea4d3 7076 if (n_reg == 2 && reg == gpr)
cd3ce9b4 7077 {
7393f7f8 7078 regalign = 1;
726a989a 7079 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), unshare_expr (reg),
8fb632eb 7080 build_int_cst (TREE_TYPE (reg), n_reg - 1));
726a989a
RB
7081 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg),
7082 unshare_expr (reg), u);
cd3ce9b4 7083 }
7393f7f8
BE
7084 /* _Decimal128 is passed in even/odd fpr pairs; the stored
7085 reg number is 0 for f1, so we want to make it odd. */
7086 else if (reg == fpr && TYPE_MODE (type) == TDmode)
7087 {
726a989a 7088 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), unshare_expr (reg),
383e91e4 7089 build_int_cst (TREE_TYPE (reg), 1));
726a989a 7090 u = build2 (MODIFY_EXPR, void_type_node, unshare_expr (reg), t);
7393f7f8 7091 }
cd3ce9b4 7092
95674810 7093 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
7094 t = build2 (GE_EXPR, boolean_type_node, u, t);
7095 u = build1 (GOTO_EXPR, void_type_node, lab_false);
7096 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
7097 gimplify_and_add (t, pre_p);
7098
7099 t = sav;
7100 if (sav_ofs)
5be014d5 7101 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 7102
726a989a 7103 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), unshare_expr (reg),
8fb632eb 7104 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
7105 u = fold_convert (sizetype, u);
7106 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
7107 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 7108
e41b2a33
PB
7109 /* _Decimal32 varargs are located in the second word of the 64-bit
7110 FP register for 32-bit binaries. */
4f011e1e
JM
7111 if (!TARGET_POWERPC64
7112 && TARGET_HARD_FLOAT && TARGET_FPRS
7113 && TYPE_MODE (type) == SDmode)
e41b2a33
PB
7114 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
7115
726a989a 7116 gimplify_assign (addr, t, pre_p);
cd3ce9b4 7117
726a989a 7118 gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
cd3ce9b4 7119
726a989a
RB
7120 stmt = gimple_build_label (lab_false);
7121 gimple_seq_add_stmt (pre_p, stmt);
cd3ce9b4 7122
7393f7f8 7123 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
7124 {
7125 /* Ensure that we don't find any more args in regs.
7393f7f8 7126 Alignment has been taken care of for the special cases. */
726a989a 7127 gimplify_assign (reg, build_int_cst (TREE_TYPE (reg), 8), pre_p);
cd3ce9b4
JM
7128 }
7129 }
7130
7131 /* ... otherwise out of the overflow area. */
7132
7133 /* Care for on-stack alignment if needed. */
7134 t = ovf;
7135 if (align != 1)
7136 {
5be014d5
AP
7137 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
7138 t = fold_convert (sizetype, t);
4a90aeeb 7139 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
7140 size_int (-align));
7141 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
7142 }
7143 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
7144
726a989a 7145 gimplify_assign (unshare_expr (addr), t, pre_p);
cd3ce9b4 7146
5be014d5 7147 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
726a989a 7148 gimplify_assign (unshare_expr (ovf), t, pre_p);
cd3ce9b4
JM
7149
7150 if (lab_over)
7151 {
726a989a
RB
7152 stmt = gimple_build_label (lab_over);
7153 gimple_seq_add_stmt (pre_p, stmt);
cd3ce9b4
JM
7154 }
7155
0cfbc62b
JM
7156 if (STRICT_ALIGNMENT
7157 && (TYPE_ALIGN (type)
7158 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
7159 {
7160 /* The value (of type complex double, for example) may not be
7161 aligned in memory in the saved registers, so copy via a
7162 temporary. (This is the same code as used for SPARC.) */
7163 tree tmp = create_tmp_var (type, "va_arg_tmp");
7164 tree dest_addr = build_fold_addr_expr (tmp);
7165
5039610b
SL
7166 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
7167 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
7168
7169 gimplify_and_add (copy, pre_p);
7170 addr = dest_addr;
7171 }
7172
08b0dc1b 7173 addr = fold_convert (ptrtype, addr);
872a65b5 7174 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
7175}
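/* Worked example (illustrative): on a 32-bit V.4 hard-float target,
   va_arg (ap, double) takes the register branch above while ap->fpr is
   still below 8.  The address computed is

     ap->reg_save_area + 8*4 + ap->fpr * 8

   i.e. the doubleword where the corresponding FP argument register was
   dumped, and ap->fpr is post-incremented.  Once ap->fpr reaches 8 the
   value comes from ap->overflow_arg_area instead, after that pointer
   has been rounded up to the 8-byte alignment chosen for DFmode.  */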
7176
0ac081f6
AH
7177/* Builtins. */
7178
58646b77
PB
7179static void
7180def_builtin (int mask, const char *name, tree type, int code)
7181{
96038623 7182 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
58646b77
PB
7183 {
7184 if (rs6000_builtin_decls[code])
7185 abort ();
7186
7187 rs6000_builtin_decls[code] =
c79efc4d
RÁE
7188 add_builtin_function (name, type, code, BUILT_IN_MD,
7189 NULL, NULL_TREE);
58646b77
PB
7190 }
7191}
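/* A registration made through the tables below has roughly this shape
   (the type-node name here is illustrative, not necessarily the
   variable used later in this file):

     def_builtin (MASK_ALTIVEC, "__builtin_altivec_vmaddfp",
                  v4sf_ftype_v4sf_v4sf_v4sf, ALTIVEC_BUILTIN_VMADDFP);

   The mask keeps the builtin from being created unless the matching
   bit is set in target_flags (or paired float is enabled, per the test
   above).  */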
0ac081f6 7192
24408032
AH
7193/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
7194
2212663f 7195static const struct builtin_description bdesc_3arg[] =
24408032
AH
7196{
7197 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
7198 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
7199 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
7200 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
7201 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
7202 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
7203 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
7204 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
7205 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
7206 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 7207 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
7208 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
7209 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
7210 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
7211 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
7212 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
7213 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
7214 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
7215 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
7216 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
7217 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
7218 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
7219 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
7220
7221 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
7222 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
7223 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
7224 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
7225 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
7226 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
7227 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
7228 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
7229 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
7230 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
7231 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
7232 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
7233 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
7234 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
7235 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
96038623
DE
7236
7237 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
7238 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
7239 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
7240 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
7241 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
7242 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
7243 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
7244 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 7245 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 7246};
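/* For instance, the vmaddfp entry above backs __builtin_altivec_vmaddfp,
   which <altivec.h>'s vec_madd resolves to for vector float operands:
   __builtin_altivec_vmaddfp (a, b, c) computes a*b + c element by
   element, as documented for the AltiVec vec_madd intrinsic (noted
   here for illustration).  */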
2212663f 7247
95385cbb
AH
7248/* DST operations: void foo (void *, const int, const char). */
7249
7250static const struct builtin_description bdesc_dst[] =
7251{
7252 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
7253 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
7254 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
7255 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
7256
7257 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
7258 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
7259 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
7260 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
7261};
7262
2212663f 7263/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 7264
a3170dc6 7265static struct builtin_description bdesc_2arg[] =
0ac081f6 7266{
f18c054f
DB
7267 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
7268 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
7269 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
7270 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
7271 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
7272 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
7273 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
7274 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
7275 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
7276 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
7277 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 7278 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 7279 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
7280 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
7281 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
7282 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
7283 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
7284 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
7285 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
7286 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
7287 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
7288 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
7289 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
7290 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
7291 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
7292 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
7293 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
7294 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
7295 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
7296 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
7297 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
7298 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
7299 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
7300 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
7301 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
7302 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
7303 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
7304 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
7305 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
7306 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
7307 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
7308 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
7309 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
7310 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
7311 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
7312 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
7313 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
7314 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
7315 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
7316 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
7317 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
7318 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
7319 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
7320 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
7321 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
7322 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
7323 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
7324 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
7325 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
7326 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
7327 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
7328 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
7329 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
7330 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 7331 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 7332 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
7333 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
7334 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
7335 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 7336 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
7337 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
7338 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
7339 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
7340 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
7341 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
7342 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
7343 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
7344 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
71d46ca5
MM
7345 { MASK_ALTIVEC, CODE_FOR_vashlv16qi3, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
7346 { MASK_ALTIVEC, CODE_FOR_vashlv8hi3, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
7347 { MASK_ALTIVEC, CODE_FOR_vashlv4si3, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
0ac081f6
AH
7348 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
7349 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
7350 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
7351 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
7352 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
71d46ca5
MM
7353 { MASK_ALTIVEC, CODE_FOR_vlshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
7354 { MASK_ALTIVEC, CODE_FOR_vlshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
7355 { MASK_ALTIVEC, CODE_FOR_vlshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
7356 { MASK_ALTIVEC, CODE_FOR_vashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
7357 { MASK_ALTIVEC, CODE_FOR_vashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
7358 { MASK_ALTIVEC, CODE_FOR_vashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
7359 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
7360 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
7361 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
7362 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
7363 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
7364 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
7365 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
7366 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
7367 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
7368 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
7369 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
7370 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
7371 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
7372 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
7373 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
7374 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
7375 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
7376 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 7377 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 7378
58646b77
PB
7379 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
7380 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
7381 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
7382 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
7383 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
7384 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
7385 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
7386 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
7387 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
7388 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
7389 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
7390 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
7391 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
7392 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
7393 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
7394 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
7395 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
7396 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
7397 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
7398 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
7399 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
7400 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
7401 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
7402 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
7403 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7404 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7405 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7406 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7407 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7408 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7409 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7410 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7411 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7412 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7413 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7414 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7415 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7416 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7417 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7418 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7419 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7420 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7421 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7422 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7423 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7424 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7425 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7426 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7427 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7428 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7429 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7430 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7431 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7432 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7433 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7434 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7435 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7436 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7437 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7438 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7439 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7440 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7441 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7442 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7443 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7444 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7445 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7446 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7447 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7448 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7449 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7450 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7451 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7452 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7453 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7454 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7455 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7456 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7457 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7458 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7459 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7460 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7461 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7462 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7463 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7464 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7465 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7466 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7467 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7468 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7469 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7470 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7471 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7472 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7473 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7474 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7475 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7476 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7477 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7478 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7479 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7480 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7481 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7482 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7483 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7484 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7485 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7486 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7487 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7488 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7489 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7490 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7491 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7492 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7493 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7494 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7495 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7496 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7497 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7498 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7499 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7500 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7501 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7502 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7503 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7504 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7505 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7506
96038623
DE
7507 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7508 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7509 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7510 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7511 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7512 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7513 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7514 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7515 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7516 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7517
a3170dc6
AH
7518 /* Place-holder. Leave as first SPE builtin. */
7519 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7520 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7521 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7522 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7523 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7524 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7525 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7526 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7527 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7528 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7529 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7530 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7531 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7532 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7533 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7534 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7535 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7536 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7537 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7538 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7539 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7540 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7541 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7542 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7543 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7544 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7545 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7546 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7547 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7548 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7549 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7550 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7551 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7552 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7553 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7554 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7555 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7556 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7557 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7558 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7559 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7560 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7561 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7562 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7563 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7564 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7565 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7566 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7567 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7568 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7569 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7570 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7571 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7572 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7573 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7574 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7575 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7576 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7577 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7578 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7579 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7580 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7581 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7582 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7583 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7584 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7585 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7586 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7587 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7588 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7589 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7590 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7591 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7592 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
7593 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7594 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
7595 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7596 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7597 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7598 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7599 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7600 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7601 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7602 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7603 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7604 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7605 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7606 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7607 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7608 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7609 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7610 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7611 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7612 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7613 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7614 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7615 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7616 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7617 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7618 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7619 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7620 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7621 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7622 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7623 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7624 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7625 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7626 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7627 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7628
7629 /* SPE binary operations expecting a 5-bit unsigned literal. */
7630 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7631
7632 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7633 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7634 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7635 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7636 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7637 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7638 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7639 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7640 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7641 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7642 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7643 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7644 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7645 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7646 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7647 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7648 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7649 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7650 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7651 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7652 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7653 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7654 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7655 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7656 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7657 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7658
7659 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7660 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
7661};
7662
7663/* AltiVec predicates. */
7664
7665struct builtin_description_predicates
7666{
7667 const unsigned int mask;
7668 const enum insn_code icode;
7669 const char *opcode;
7670 const char *const name;
7671 const enum rs6000_builtins code;
7672};
7673
7674static const struct builtin_description_predicates bdesc_altivec_preds[] =
7675{
7676 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7677 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7678 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7679 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7680 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7681 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7682 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7683 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7684 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7685 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7686 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7687 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
7688 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7689
81f40b79
ILT
7690 { MASK_ALTIVEC, CODE_FOR_nothing, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7691 { MASK_ALTIVEC, CODE_FOR_nothing, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7692 { MASK_ALTIVEC, CODE_FOR_nothing, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7693};
24408032 7694
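/* Illustrative sketch, not from the original sources: each predicate
   entry expands to the named "*vcmp...." pattern plus a CR6 test (see
   altivec_expand_predicate_builtin below).  The vec_all_* and vec_any_*
   forms used here are the usual <altivec.h> wrappers and are assumed,
   not defined in this file.  */
#if 0
#include <altivec.h>

int
all_equal (vector unsigned int a, vector unsigned int b)
{
  return vec_all_eq (a, b);    /* goes through __builtin_altivec_vcmpequw_p */
}

int
any_greater (vector signed short a, vector signed short b)
{
  return vec_any_gt (a, b);    /* goes through __builtin_altivec_vcmpgtsh_p */
}
#endif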
a3170dc6
AH
7695/* SPE predicates. */
7696static struct builtin_description bdesc_spe_predicates[] =
7697{
7698 /* Place-holder. Leave as first. */
7699 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7700 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7701 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7702 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7703 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7704 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7705 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7706 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7707 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7708 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7709 /* Place-holder. Leave as last. */
7710 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7711};
7712
7713/* SPE evsel predicates. */
7714static struct builtin_description bdesc_spe_evsel[] =
7715{
7716 /* Place-holder. Leave as first. */
7717 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7718 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7719 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7720 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7721 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7722 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7723 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7724 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7725 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7726 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7727 /* Place-holder. Leave as last. */
7728 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7729};
7730
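/* Illustrative sketch, not from the original sources: each evsel entry
   pairs a compare icode with the evsel instruction, so the builtin
   compares its first two operands and then selects, element by element,
   between the last two.  The __ev64_opaque__ type name and the
   four-operand form shown here are the ones <spe.h> normally uses and
   are assumptions.  */
#if 0
__ev64_opaque__
select_greater_signed (__ev64_opaque__ a, __ev64_opaque__ b,
                       __ev64_opaque__ c, __ev64_opaque__ d)
{
  /* For each 32-bit element i: result[i] = (a[i] > b[i]) ? c[i] : d[i],
     via evcmpgts followed by evsel.  */
  return __builtin_spe_evsel_gts (a, b, c, d);
}
#endif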
96038623
DE
7731/* PAIRED predicates. */
7732static const struct builtin_description bdesc_paired_preds[] =
7733{
7734 /* Place-holder. Leave as first. */
7735 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7736 /* Place-holder. Leave as last. */
7737 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7738};
7739
b6d08ca1 7740/* ABS* operations. */
100c4561
AH
7741
7742static const struct builtin_description bdesc_abs[] =
7743{
7744 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7745 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7746 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7747 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7748 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7749 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7750 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7751};
7752
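/* Illustrative sketch, not from the original sources: the entries above
   back vec_abs and vec_abss, which are the usual <altivec.h> wrappers
   and are assumed here; the "abss" forms saturate instead of wrapping
   on the most negative element value.  */
#if 0
#include <altivec.h>

vector float
absolute (vector float v)
{
  return vec_abs (v);      /* __builtin_altivec_abs_v4sf  */
}

vector signed short
saturated_absolute (vector signed short v)
{
  return vec_abss (v);     /* __builtin_altivec_abss_v8hi */
}
#endif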
617e0e1d
DB
7753/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7754 foo (VECa). */
24408032 7755
a3170dc6 7756static struct builtin_description bdesc_1arg[] =
2212663f 7757{
617e0e1d
DB
7758 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7759 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7760 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7761 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7762 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7763 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7764 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7765 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7766 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7767 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7768 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7769 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7770 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7771 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7772 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7773 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7774 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7775
58646b77
PB
7776 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7777 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7778 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7779 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7780 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7781 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7782 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7783 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7784 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7785 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7786 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7787 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7788 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7789 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7790 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7791 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7792 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7793 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7794 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7795
a3170dc6
AH
7796 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7797 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7798 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7799 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7800 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7801 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7802 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7803 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7804 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7805 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7806 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7807 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7808 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7809 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7810 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7811 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7812 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7813 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7814 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7815 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7816 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7817 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7818 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7819 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7820 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7821 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7822 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7823 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7824 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7825 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7826
7827 /* Place-holder. Leave as last unary SPE builtin. */
96038623
DE
7828 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7829
7830 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7831 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7832 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7833 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7834 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
2212663f
DB
7835};
7836
7837static rtx
5039610b 7838rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7839{
7840 rtx pat;
5039610b 7841 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7842 rtx op0 = expand_normal (arg0);
2212663f
DB
7843 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7844 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7845
0559cc77
DE
7846 if (icode == CODE_FOR_nothing)
7847 /* Builtin not supported on this processor. */
7848 return 0;
7849
20e26713
AH
7850 /* If we got invalid arguments, bail out before generating bad rtl. */
7851 if (arg0 == error_mark_node)
9a171fcd 7852 return const0_rtx;
20e26713 7853
0559cc77
DE
7854 if (icode == CODE_FOR_altivec_vspltisb
7855 || icode == CODE_FOR_altivec_vspltish
7856 || icode == CODE_FOR_altivec_vspltisw
7857 || icode == CODE_FOR_spe_evsplatfi
7858 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7859 {
7860 /* Only allow 5-bit *signed* literals. */
b44140e7 7861 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7862 || INTVAL (op0) > 15
7863 || INTVAL (op0) < -16)
b44140e7
AH
7864 {
7865 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7866 return const0_rtx;
b44140e7 7867 }
b44140e7
AH
7868 }
7869
c62f2db5 7870 if (target == 0
2212663f
DB
7871 || GET_MODE (target) != tmode
7872 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7873 target = gen_reg_rtx (tmode);
7874
7875 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7876 op0 = copy_to_mode_reg (mode0, op0);
7877
7878 pat = GEN_FCN (icode) (target, op0);
7879 if (! pat)
7880 return 0;
7881 emit_insn (pat);
0ac081f6 7882
2212663f
DB
7883 return target;
7884}
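/* Illustrative sketch, not from the original sources: the vspltis*
   check above restricts the operand to a literal in the range -16..15.
   vec_splat_s8 is the usual <altivec.h> wrapper and is assumed here.  */
#if 0
#include <altivec.h>

vector signed char
splat_small_constant (void)
{
  return vec_splat_s8 (-16);   /* accepted: vspltisb takes -16 .. 15 */
  /* vec_splat_s8 (16) would be rejected with
     "argument 1 must be a 5-bit signed literal".  */
}
#endif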
ae4b4a02 7885
100c4561 7886static rtx
5039610b 7887altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7888{
7889 rtx pat, scratch1, scratch2;
5039610b 7890 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7891 rtx op0 = expand_normal (arg0);
100c4561
AH
7892 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7893 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7894
7895 /* If we have invalid arguments, bail out before generating bad rtl. */
7896 if (arg0 == error_mark_node)
9a171fcd 7897 return const0_rtx;
100c4561
AH
7898
7899 if (target == 0
7900 || GET_MODE (target) != tmode
7901 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7902 target = gen_reg_rtx (tmode);
7903
7904 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7905 op0 = copy_to_mode_reg (mode0, op0);
7906
7907 scratch1 = gen_reg_rtx (mode0);
7908 scratch2 = gen_reg_rtx (mode0);
7909
7910 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7911 if (! pat)
7912 return 0;
7913 emit_insn (pat);
7914
7915 return target;
7916}
7917
0ac081f6 7918static rtx
5039610b 7919rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7920{
7921 rtx pat;
5039610b
SL
7922 tree arg0 = CALL_EXPR_ARG (exp, 0);
7923 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7924 rtx op0 = expand_normal (arg0);
7925 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7926 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7927 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7928 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7929
0559cc77
DE
7930 if (icode == CODE_FOR_nothing)
7931 /* Builtin not supported on this processor. */
7932 return 0;
7933
20e26713
AH
7934 /* If we got invalid arguments, bail out before generating bad rtl. */
7935 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7936 return const0_rtx;
20e26713 7937
0559cc77
DE
7938 if (icode == CODE_FOR_altivec_vcfux
7939 || icode == CODE_FOR_altivec_vcfsx
7940 || icode == CODE_FOR_altivec_vctsxs
7941 || icode == CODE_FOR_altivec_vctuxs
7942 || icode == CODE_FOR_altivec_vspltb
7943 || icode == CODE_FOR_altivec_vsplth
7944 || icode == CODE_FOR_altivec_vspltw
7945 || icode == CODE_FOR_spe_evaddiw
7946 || icode == CODE_FOR_spe_evldd
7947 || icode == CODE_FOR_spe_evldh
7948 || icode == CODE_FOR_spe_evldw
7949 || icode == CODE_FOR_spe_evlhhesplat
7950 || icode == CODE_FOR_spe_evlhhossplat
7951 || icode == CODE_FOR_spe_evlhhousplat
7952 || icode == CODE_FOR_spe_evlwhe
7953 || icode == CODE_FOR_spe_evlwhos
7954 || icode == CODE_FOR_spe_evlwhou
7955 || icode == CODE_FOR_spe_evlwhsplat
7956 || icode == CODE_FOR_spe_evlwwsplat
7957 || icode == CODE_FOR_spe_evrlwi
7958 || icode == CODE_FOR_spe_evslwi
7959 || icode == CODE_FOR_spe_evsrwis
f5119d10 7960 || icode == CODE_FOR_spe_evsubifw
0559cc77 7961 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7962 {
7963 /* Only allow 5-bit unsigned literals. */
8bb418a3 7964 STRIP_NOPS (arg1);
b44140e7
AH
7965 if (TREE_CODE (arg1) != INTEGER_CST
7966 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7967 {
7968 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7969 return const0_rtx;
b44140e7 7970 }
b44140e7
AH
7971 }
7972
c62f2db5 7973 if (target == 0
0ac081f6
AH
7974 || GET_MODE (target) != tmode
7975 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7976 target = gen_reg_rtx (tmode);
7977
7978 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7979 op0 = copy_to_mode_reg (mode0, op0);
7980 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7981 op1 = copy_to_mode_reg (mode1, op1);
7982
7983 pat = GEN_FCN (icode) (target, op0, op1);
7984 if (! pat)
7985 return 0;
7986 emit_insn (pat);
7987
7988 return target;
7989}
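/* Illustrative sketch, not from the original sources: for the icodes
   listed above the second operand must be a 5-bit unsigned literal.
   vec_splat and vec_ctf are the usual <altivec.h> wrappers and are
   assumed here.  */
#if 0
#include <altivec.h>

vector float
literal_second_operands (vector signed int v)
{
  vector signed int s = vec_splat (v, 3);   /* vspltw: element selector     */
  vector float f = vec_ctf (s, 2);          /* vcfsx: scale factor 0 .. 31  */
  return f;
}
#endif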
6525c0e7 7990
ae4b4a02 7991static rtx
f676971a 7992altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7993 tree exp, rtx target)
ae4b4a02
AH
7994{
7995 rtx pat, scratch;
5039610b
SL
7996 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7997 tree arg0 = CALL_EXPR_ARG (exp, 1);
7998 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7999 rtx op0 = expand_normal (arg0);
8000 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
8001 enum machine_mode tmode = SImode;
8002 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8003 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8004 int cr6_form_int;
8005
8006 if (TREE_CODE (cr6_form) != INTEGER_CST)
8007 {
8008 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 8009 return const0_rtx;
ae4b4a02
AH
8010 }
8011 else
8012 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
8013
37409796 8014 gcc_assert (mode0 == mode1);
ae4b4a02
AH
8015
8016 /* If we have invalid arguments, bail out before generating bad rtl. */
8017 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 8018 return const0_rtx;
ae4b4a02
AH
8019
8020 if (target == 0
8021 || GET_MODE (target) != tmode
8022 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8023 target = gen_reg_rtx (tmode);
8024
8025 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8026 op0 = copy_to_mode_reg (mode0, op0);
8027 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8028 op1 = copy_to_mode_reg (mode1, op1);
8029
8030 scratch = gen_reg_rtx (mode0);
8031
8032 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 8033 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
8034 if (! pat)
8035 return 0;
8036 emit_insn (pat);
8037
8038 /* The vec_any* and vec_all* predicates use the same opcodes for two
8039 different operations, but the bits in CR6 will be different
8040 depending on what information we want. So we have to play tricks
8041 with CR6 to get the right bits out.
8042
8043 If you think this is disgusting, look at the specs for the
8044 AltiVec predicates. */
8045
c4ad648e
AM
8046 switch (cr6_form_int)
8047 {
8048 case 0:
8049 emit_insn (gen_cr6_test_for_zero (target));
8050 break;
8051 case 1:
8052 emit_insn (gen_cr6_test_for_zero_reverse (target));
8053 break;
8054 case 2:
8055 emit_insn (gen_cr6_test_for_lt (target));
8056 break;
8057 case 3:
8058 emit_insn (gen_cr6_test_for_lt_reverse (target));
8059 break;
8060 default:
8061 error ("argument 1 of __builtin_altivec_predicate is out of range");
8062 break;
8063 }
ae4b4a02
AH
8064
8065 return target;
8066}
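/* Illustrative sketch, not from the original sources: the leading
   constant argument selects which CR6 bit (possibly inverted) becomes
   the integer result.  The __CR6_* names and their 0..3 encoding are
   the ones <altivec.h> normally provides and are assumptions here.  */
#if 0
#include <altivec.h>

int
cr6_forms (vector unsigned char a, vector unsigned char b)
{
  /* vcmpequb. sets CR6[LT] when all elements compare true and CR6[EQ]
     when none do; "all" tests LT, "any" tests the inverse of EQ.  */
  int all = __builtin_altivec_vcmpequb_p (__CR6_LT, a, b);
  int any = __builtin_altivec_vcmpequb_p (__CR6_EQ_REV, a, b);
  return all && any;
}
#endif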
8067
96038623
DE
8068static rtx
8069paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
8070{
8071 rtx pat, addr;
8072 tree arg0 = CALL_EXPR_ARG (exp, 0);
8073 tree arg1 = CALL_EXPR_ARG (exp, 1);
8074 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8075 enum machine_mode mode0 = Pmode;
8076 enum machine_mode mode1 = Pmode;
8077 rtx op0 = expand_normal (arg0);
8078 rtx op1 = expand_normal (arg1);
8079
8080 if (icode == CODE_FOR_nothing)
8081 /* Builtin not supported on this processor. */
8082 return 0;
8083
8084 /* If we got invalid arguments, bail out before generating bad rtl. */
8085 if (arg0 == error_mark_node || arg1 == error_mark_node)
8086 return const0_rtx;
8087
8088 if (target == 0
8089 || GET_MODE (target) != tmode
8090 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8091 target = gen_reg_rtx (tmode);
8092
8093 op1 = copy_to_mode_reg (mode1, op1);
8094
8095 if (op0 == const0_rtx)
8096 {
8097 addr = gen_rtx_MEM (tmode, op1);
8098 }
8099 else
8100 {
8101 op0 = copy_to_mode_reg (mode0, op0);
8102 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
8103 }
8104
8105 pat = GEN_FCN (icode) (target, addr);
8106
8107 if (! pat)
8108 return 0;
8109 emit_insn (pat);
8110
8111 return target;
8112}
8113
b4a62fa0 8114static rtx
0b61703c 8115altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target, bool blk)
b4a62fa0
SB
8116{
8117 rtx pat, addr;
5039610b
SL
8118 tree arg0 = CALL_EXPR_ARG (exp, 0);
8119 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
8120 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8121 enum machine_mode mode0 = Pmode;
8122 enum machine_mode mode1 = Pmode;
84217346
MD
8123 rtx op0 = expand_normal (arg0);
8124 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
8125
8126 if (icode == CODE_FOR_nothing)
8127 /* Builtin not supported on this processor. */
8128 return 0;
8129
8130 /* If we got invalid arguments, bail out before generating bad rtl. */
8131 if (arg0 == error_mark_node || arg1 == error_mark_node)
8132 return const0_rtx;
8133
8134 if (target == 0
8135 || GET_MODE (target) != tmode
8136 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8137 target = gen_reg_rtx (tmode);
8138
f676971a 8139 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
8140
8141 if (op0 == const0_rtx)
8142 {
0b61703c 8143 addr = gen_rtx_MEM (blk ? BLKmode : tmode, op1);
b4a62fa0
SB
8144 }
8145 else
8146 {
8147 op0 = copy_to_mode_reg (mode0, op0);
0b61703c 8148 addr = gen_rtx_MEM (blk ? BLKmode : tmode, gen_rtx_PLUS (Pmode, op0, op1));
b4a62fa0
SB
8149 }
8150
8151 pat = GEN_FCN (icode) (target, addr);
8152
8153 if (! pat)
8154 return 0;
8155 emit_insn (pat);
8156
8157 return target;
8158}
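/* Illustrative sketch, not from the original sources: the AltiVec load
   builtins take (offset, base), and the expander above forms the MEM as
   base + offset, or just base when the offset folds to literal zero.
   vec_ld is the usual <altivec.h> wrapper and is assumed here.  */
#if 0
#include <altivec.h>

vector float
load_aligned (int byte_offset, const float *p)
{
  return vec_ld (byte_offset, p);   /* lvx: low 4 address bits ignored */
}
#endif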
8159
61bea3b0 8160static rtx
5039610b 8161spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 8162{
5039610b
SL
8163 tree arg0 = CALL_EXPR_ARG (exp, 0);
8164 tree arg1 = CALL_EXPR_ARG (exp, 1);
8165 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8166 rtx op0 = expand_normal (arg0);
8167 rtx op1 = expand_normal (arg1);
8168 rtx op2 = expand_normal (arg2);
61bea3b0
AH
8169 rtx pat;
8170 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
8171 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
8172 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
8173
8174 /* Invalid arguments, bail out before generating bad rtl. */
8175 if (arg0 == error_mark_node
8176 || arg1 == error_mark_node
8177 || arg2 == error_mark_node)
8178 return const0_rtx;
8179
8180 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
8181 op0 = copy_to_mode_reg (mode2, op0);
8182 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
8183 op1 = copy_to_mode_reg (mode0, op1);
8184 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8185 op2 = copy_to_mode_reg (mode1, op2);
8186
8187 pat = GEN_FCN (icode) (op1, op2, op0);
8188 if (pat)
8189 emit_insn (pat);
8190 return NULL_RTX;
8191}
8192
96038623
DE
8193static rtx
8194paired_expand_stv_builtin (enum insn_code icode, tree exp)
8195{
8196 tree arg0 = CALL_EXPR_ARG (exp, 0);
8197 tree arg1 = CALL_EXPR_ARG (exp, 1);
8198 tree arg2 = CALL_EXPR_ARG (exp, 2);
8199 rtx op0 = expand_normal (arg0);
8200 rtx op1 = expand_normal (arg1);
8201 rtx op2 = expand_normal (arg2);
8202 rtx pat, addr;
8203 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8204 enum machine_mode mode1 = Pmode;
8205 enum machine_mode mode2 = Pmode;
8206
8207 /* Invalid arguments, bail out before generating bad rtl. */
8208 if (arg0 == error_mark_node
8209 || arg1 == error_mark_node
8210 || arg2 == error_mark_node)
8211 return const0_rtx;
8212
8213 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8214 op0 = copy_to_mode_reg (tmode, op0);
8215
8216 op2 = copy_to_mode_reg (mode2, op2);
8217
8218 if (op1 == const0_rtx)
8219 {
8220 addr = gen_rtx_MEM (tmode, op2);
8221 }
8222 else
8223 {
8224 op1 = copy_to_mode_reg (mode1, op1);
8225 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8226 }
8227
8228 pat = GEN_FCN (icode) (addr, op0);
8229 if (pat)
8230 emit_insn (pat);
8231 return NULL_RTX;
8232}
8233
6525c0e7 8234static rtx
5039610b 8235altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 8236{
5039610b
SL
8237 tree arg0 = CALL_EXPR_ARG (exp, 0);
8238 tree arg1 = CALL_EXPR_ARG (exp, 1);
8239 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8240 rtx op0 = expand_normal (arg0);
8241 rtx op1 = expand_normal (arg1);
8242 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
8243 rtx pat, addr;
8244 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8245 enum machine_mode mode1 = Pmode;
8246 enum machine_mode mode2 = Pmode;
6525c0e7
AH
8247
8248 /* Invalid arguments, bail out before generating bad rtl. */
8249 if (arg0 == error_mark_node
8250 || arg1 == error_mark_node
8251 || arg2 == error_mark_node)
9a171fcd 8252 return const0_rtx;
6525c0e7 8253
b4a62fa0
SB
8254 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8255 op0 = copy_to_mode_reg (tmode, op0);
8256
f676971a 8257 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
8258
8259 if (op1 == const0_rtx)
8260 {
8261 addr = gen_rtx_MEM (tmode, op2);
8262 }
8263 else
8264 {
8265 op1 = copy_to_mode_reg (mode1, op1);
8266 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8267 }
6525c0e7 8268
b4a62fa0 8269 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
8270 if (pat)
8271 emit_insn (pat);
8272 return NULL_RTX;
8273}
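/* Illustrative sketch, not from the original sources: the store
   builtins take (value, offset, base); note that the generated pattern
   receives (address, value), as in the GEN_FCN call above.  vec_st and
   vec_ste are the usual <altivec.h> wrappers and are assumed here.  */
#if 0
#include <altivec.h>

void
store_examples (vector float v, float *p)
{
  vec_st (v, 0, p);    /* stvx: 16-byte aligned store of the whole vector */
  vec_ste (v, 0, p);   /* stvewx: store a single word element             */
}
#endif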
8274
2212663f 8275static rtx
5039610b 8276rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
8277{
8278 rtx pat;
5039610b
SL
8279 tree arg0 = CALL_EXPR_ARG (exp, 0);
8280 tree arg1 = CALL_EXPR_ARG (exp, 1);
8281 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8282 rtx op0 = expand_normal (arg0);
8283 rtx op1 = expand_normal (arg1);
8284 rtx op2 = expand_normal (arg2);
2212663f
DB
8285 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8286 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8287 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8288 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 8289
774b5662
DE
8290 if (icode == CODE_FOR_nothing)
8291 /* Builtin not supported on this processor. */
8292 return 0;
8293
20e26713
AH
8294 /* If we got invalid arguments, bail out before generating bad rtl. */
8295 if (arg0 == error_mark_node
8296 || arg1 == error_mark_node
8297 || arg2 == error_mark_node)
9a171fcd 8298 return const0_rtx;
20e26713 8299
aba5fb01
NS
8300 if (icode == CODE_FOR_altivec_vsldoi_v4sf
8301 || icode == CODE_FOR_altivec_vsldoi_v4si
8302 || icode == CODE_FOR_altivec_vsldoi_v8hi
8303 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
8304 {
8305 /* Only allow 4-bit unsigned literals. */
8bb418a3 8306 STRIP_NOPS (arg2);
b44140e7
AH
8307 if (TREE_CODE (arg2) != INTEGER_CST
8308 || TREE_INT_CST_LOW (arg2) & ~0xf)
8309 {
8310 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 8311 return const0_rtx;
b44140e7 8312 }
b44140e7
AH
8313 }
8314
c62f2db5 8315 if (target == 0
2212663f
DB
8316 || GET_MODE (target) != tmode
8317 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8318 target = gen_reg_rtx (tmode);
8319
8320 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8321 op0 = copy_to_mode_reg (mode0, op0);
8322 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8323 op1 = copy_to_mode_reg (mode1, op1);
8324 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
8325 op2 = copy_to_mode_reg (mode2, op2);
8326
49e39588
RE
8327 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
8328 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
8329 else
8330 pat = GEN_FCN (icode) (target, op0, op1, op2);
2212663f
DB
8331 if (! pat)
8332 return 0;
8333 emit_insn (pat);
8334
8335 return target;
8336}
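/* Illustrative sketch, not from the original sources: ternary builtins
   follow the same pattern, and for the vsldoi icodes the third operand
   must be a 4-bit unsigned literal.  vec_madd and vec_sld are the usual
   <altivec.h> wrappers and are assumed here.  */
#if 0
#include <altivec.h>

vector float
ternary_examples (vector float a, vector float b, vector float c)
{
  vector float m = vec_madd (a, b, c);   /* vmaddfp                      */
  return vec_sld (m, c, 4);              /* vsldoi: shift amount 0 .. 15 */
}
#endif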
92898235 8337
3a9b8c7e 8338/* Expand the lvx builtins. */
0ac081f6 8339static rtx
a2369ed3 8340altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 8341{
5039610b 8342 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 8343 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
8344 tree arg0;
8345 enum machine_mode tmode, mode0;
7c3abc73 8346 rtx pat, op0;
3a9b8c7e 8347 enum insn_code icode;
92898235 8348
0ac081f6
AH
8349 switch (fcode)
8350 {
f18c054f 8351 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 8352 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 8353 break;
f18c054f 8354 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 8355 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
8356 break;
8357 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 8358 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
8359 break;
8360 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 8361 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
8362 break;
8363 default:
8364 *expandedp = false;
8365 return NULL_RTX;
8366 }
0ac081f6 8367
3a9b8c7e 8368 *expandedp = true;
f18c054f 8369
5039610b 8370 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8371 op0 = expand_normal (arg0);
3a9b8c7e
AH
8372 tmode = insn_data[icode].operand[0].mode;
8373 mode0 = insn_data[icode].operand[1].mode;
f18c054f 8374
3a9b8c7e
AH
8375 if (target == 0
8376 || GET_MODE (target) != tmode
8377 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8378 target = gen_reg_rtx (tmode);
24408032 8379
3a9b8c7e
AH
8380 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8381 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 8382
3a9b8c7e
AH
8383 pat = GEN_FCN (icode) (target, op0);
8384 if (! pat)
8385 return 0;
8386 emit_insn (pat);
8387 return target;
8388}
f18c054f 8389
3a9b8c7e
AH
8390/* Expand the stvx builtins. */
8391static rtx
f676971a 8392altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8393 bool *expandedp)
3a9b8c7e 8394{
5039610b 8395 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8396 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8397 tree arg0, arg1;
8398 enum machine_mode mode0, mode1;
7c3abc73 8399 rtx pat, op0, op1;
3a9b8c7e 8400 enum insn_code icode;
f18c054f 8401
3a9b8c7e
AH
8402 switch (fcode)
8403 {
8404 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 8405 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
8406 break;
8407 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 8408 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
8409 break;
8410 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 8411 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
8412 break;
8413 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 8414 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
8415 break;
8416 default:
8417 *expandedp = false;
8418 return NULL_RTX;
8419 }
24408032 8420
5039610b
SL
8421 arg0 = CALL_EXPR_ARG (exp, 0);
8422 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
8423 op0 = expand_normal (arg0);
8424 op1 = expand_normal (arg1);
3a9b8c7e
AH
8425 mode0 = insn_data[icode].operand[0].mode;
8426 mode1 = insn_data[icode].operand[1].mode;
f18c054f 8427
3a9b8c7e
AH
8428 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8429 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8430 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8431 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 8432
3a9b8c7e
AH
8433 pat = GEN_FCN (icode) (op0, op1);
8434 if (pat)
8435 emit_insn (pat);
f18c054f 8436
3a9b8c7e
AH
8437 *expandedp = true;
8438 return NULL_RTX;
8439}
f18c054f 8440
3a9b8c7e
AH
8441/* Expand the dst builtins. */
8442static rtx
f676971a 8443altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8444 bool *expandedp)
3a9b8c7e 8445{
5039610b 8446 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8447 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8448 tree arg0, arg1, arg2;
8449 enum machine_mode mode0, mode1, mode2;
7c3abc73 8450 rtx pat, op0, op1, op2;
586de218 8451 const struct builtin_description *d;
a3170dc6 8452 size_t i;
f18c054f 8453
3a9b8c7e 8454 *expandedp = false;
f18c054f 8455
3a9b8c7e 8456 /* Handle DST variants. */
586de218 8457 d = bdesc_dst;
3a9b8c7e
AH
8458 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8459 if (d->code == fcode)
8460 {
5039610b
SL
8461 arg0 = CALL_EXPR_ARG (exp, 0);
8462 arg1 = CALL_EXPR_ARG (exp, 1);
8463 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8464 op0 = expand_normal (arg0);
8465 op1 = expand_normal (arg1);
8466 op2 = expand_normal (arg2);
3a9b8c7e
AH
8467 mode0 = insn_data[d->icode].operand[0].mode;
8468 mode1 = insn_data[d->icode].operand[1].mode;
8469 mode2 = insn_data[d->icode].operand[2].mode;
24408032 8470
3a9b8c7e
AH
8471 /* Invalid arguments, bail out before generating bad rtl. */
8472 if (arg0 == error_mark_node
8473 || arg1 == error_mark_node
8474 || arg2 == error_mark_node)
8475 return const0_rtx;
f18c054f 8476
86e7df90 8477 *expandedp = true;
8bb418a3 8478 STRIP_NOPS (arg2);
3a9b8c7e
AH
8479 if (TREE_CODE (arg2) != INTEGER_CST
8480 || TREE_INT_CST_LOW (arg2) & ~0x3)
8481 {
9e637a26 8482 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
8483 return const0_rtx;
8484 }
f18c054f 8485
3a9b8c7e 8486 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 8487 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
8488 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8489 op1 = copy_to_mode_reg (mode1, op1);
24408032 8490
3a9b8c7e
AH
8491 pat = GEN_FCN (d->icode) (op0, op1, op2);
8492 if (pat != 0)
8493 emit_insn (pat);
f18c054f 8494
3a9b8c7e
AH
8495 return NULL_RTX;
8496 }
f18c054f 8497
3a9b8c7e
AH
8498 return NULL_RTX;
8499}
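/* Illustrative sketch, not from the original sources: the dst variants
   take (address, control word, tag), and the tag must be a 2-bit
   literal naming one of the four data streams.  vec_dst and vec_dss are
   the usual <altivec.h> wrappers and are assumed here.  */
#if 0
#include <altivec.h>

void
prefetch_stream (const float *p, int control)
{
  vec_dst (p, control, 0);   /* start prefetch on stream 0 */
  vec_dss (0);               /* stop stream 0              */
}
#endif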
24408032 8500
7a4eca66
DE
8501/* Expand vec_init builtin. */
8502static rtx
5039610b 8503altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
8504{
8505 enum machine_mode tmode = TYPE_MODE (type);
8506 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8507 int i, n_elt = GET_MODE_NUNITS (tmode);
8508 rtvec v = rtvec_alloc (n_elt);
8509
8510 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 8511 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8512
5039610b 8513 for (i = 0; i < n_elt; ++i)
7a4eca66 8514 {
5039610b 8515 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
8516 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8517 }
8518
7a4eca66
DE
8519 if (!target || !register_operand (target, tmode))
8520 target = gen_reg_rtx (tmode);
8521
8522 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8523 return target;
8524}
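/* Illustrative sketch, not from the original sources: each scalar
   argument becomes one element of the PARALLEL handed to
   rs6000_expand_vector_init.  The __builtin_vec_init_v4si name is the
   one normally registered for ALTIVEC_BUILTIN_VEC_INIT_V4SI and is an
   assumption here; the vector keyword requires -maltivec.  */
#if 0
vector signed int
make_vector (int a, int b, int c, int d)
{
  return __builtin_vec_init_v4si (a, b, c, d);
}
#endif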
8525
8526/* Return the integer constant in ARG. Constrain it to be in the range
8527 of the subparts of VEC_TYPE; issue an error if not. */
8528
8529static int
8530get_element_number (tree vec_type, tree arg)
8531{
8532 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8533
8534 if (!host_integerp (arg, 1)
8535 || (elt = tree_low_cst (arg, 1), elt > max))
8536 {
8537 error ("selector must be an integer constant in the range 0..%wi", max);
8538 return 0;
8539 }
8540
8541 return elt;
8542}
8543
8544/* Expand vec_set builtin. */
8545static rtx
5039610b 8546altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
8547{
8548 enum machine_mode tmode, mode1;
8549 tree arg0, arg1, arg2;
8550 int elt;
8551 rtx op0, op1;
8552
5039610b
SL
8553 arg0 = CALL_EXPR_ARG (exp, 0);
8554 arg1 = CALL_EXPR_ARG (exp, 1);
8555 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
8556
8557 tmode = TYPE_MODE (TREE_TYPE (arg0));
8558 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8559 gcc_assert (VECTOR_MODE_P (tmode));
8560
bbbbb16a
ILT
8561 op0 = expand_expr (arg0, NULL_RTX, tmode, EXPAND_NORMAL);
8562 op1 = expand_expr (arg1, NULL_RTX, mode1, EXPAND_NORMAL);
7a4eca66
DE
8563 elt = get_element_number (TREE_TYPE (arg0), arg2);
8564
8565 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8566 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8567
8568 op0 = force_reg (tmode, op0);
8569 op1 = force_reg (mode1, op1);
8570
8571 rs6000_expand_vector_set (op0, op1, elt);
8572
8573 return op0;
8574}
8575
8576/* Expand vec_ext builtin. */
8577static rtx
5039610b 8578altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
8579{
8580 enum machine_mode tmode, mode0;
8581 tree arg0, arg1;
8582 int elt;
8583 rtx op0;
8584
5039610b
SL
8585 arg0 = CALL_EXPR_ARG (exp, 0);
8586 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8587
84217346 8588 op0 = expand_normal (arg0);
7a4eca66
DE
8589 elt = get_element_number (TREE_TYPE (arg0), arg1);
8590
8591 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8592 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8593 gcc_assert (VECTOR_MODE_P (mode0));
8594
8595 op0 = force_reg (mode0, op0);
8596
8597 if (optimize || !target || !register_operand (target, tmode))
8598 target = gen_reg_rtx (tmode);
8599
8600 rs6000_expand_vector_extract (target, op0, elt);
8601
8602 return target;
8603}
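/* Illustrative usage sketch, not part of the original file: the three
   expanders above sit behind the __builtin_vec_init_*, __builtin_vec_set_*
   and __builtin_vec_ext_* builtins registered by altivec_init_builtins
   further down (those exact spellings are assumed here; they are not shown
   in this excerpt).  Assumes -maltivec.

     static int
     demo_init_set_ext (int a, int b)
     {
       vector signed int v = __builtin_vec_init_v4si (a, b, 0, 0);
       v = __builtin_vec_set_v4si (v, 7, 2);
       return __builtin_vec_ext_v4si (v, 2);
     }

   The element number handed to the set/ext forms must be a constant that
   get_element_number accepts, i.e. 0..3 for a V4SI.  */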
8604
3a9b8c7e
AH
8605/* Expand the builtin in EXP and store the result in TARGET. Store
8606 true in *EXPANDEDP if we found a builtin to expand. */
8607static rtx
a2369ed3 8608altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8609{
586de218
KG
8610 const struct builtin_description *d;
8611 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
8612 size_t i;
8613 enum insn_code icode;
5039610b 8614 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
8615 tree arg0;
8616 rtx op0, pat;
8617 enum machine_mode tmode, mode0;
3a9b8c7e 8618 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8619
58646b77
PB
8620 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8621 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8622 {
8623 *expandedp = true;
ea40ba9c 8624 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
8625 return const0_rtx;
8626 }
8627
3a9b8c7e
AH
8628 target = altivec_expand_ld_builtin (exp, target, expandedp);
8629 if (*expandedp)
8630 return target;
0ac081f6 8631
3a9b8c7e
AH
8632 target = altivec_expand_st_builtin (exp, target, expandedp);
8633 if (*expandedp)
8634 return target;
8635
8636 target = altivec_expand_dst_builtin (exp, target, expandedp);
8637 if (*expandedp)
8638 return target;
8639
8640 *expandedp = true;
95385cbb 8641
3a9b8c7e
AH
8642 switch (fcode)
8643 {
6525c0e7 8644 case ALTIVEC_BUILTIN_STVX:
5039610b 8645 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8646 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8647 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8648 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8649 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8650 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8651 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8652 case ALTIVEC_BUILTIN_STVXL:
5039610b 8653 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8654
0b61703c
AP
8655 case ALTIVEC_BUILTIN_STVLX:
8656 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvlx, exp);
8657 case ALTIVEC_BUILTIN_STVLXL:
8658 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvlxl, exp);
8659 case ALTIVEC_BUILTIN_STVRX:
8660 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrx, exp);
8661 case ALTIVEC_BUILTIN_STVRXL:
8662 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrxl, exp);
8663
95385cbb
AH
8664 case ALTIVEC_BUILTIN_MFVSCR:
8665 icode = CODE_FOR_altivec_mfvscr;
8666 tmode = insn_data[icode].operand[0].mode;
8667
8668 if (target == 0
8669 || GET_MODE (target) != tmode
8670 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8671 target = gen_reg_rtx (tmode);
f676971a 8672
95385cbb 8673 pat = GEN_FCN (icode) (target);
0ac081f6
AH
8674 if (! pat)
8675 return 0;
8676 emit_insn (pat);
95385cbb
AH
8677 return target;
8678
8679 case ALTIVEC_BUILTIN_MTVSCR:
8680 icode = CODE_FOR_altivec_mtvscr;
5039610b 8681 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8682 op0 = expand_normal (arg0);
95385cbb
AH
8683 mode0 = insn_data[icode].operand[0].mode;
8684
 8685 /* If we got invalid arguments, bail out before generating bad rtl. */
8686 if (arg0 == error_mark_node)
9a171fcd 8687 return const0_rtx;
95385cbb
AH
8688
8689 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8690 op0 = copy_to_mode_reg (mode0, op0);
8691
8692 pat = GEN_FCN (icode) (op0);
8693 if (pat)
8694 emit_insn (pat);
8695 return NULL_RTX;
3a9b8c7e 8696
95385cbb
AH
8697 case ALTIVEC_BUILTIN_DSSALL:
8698 emit_insn (gen_altivec_dssall ());
8699 return NULL_RTX;
8700
8701 case ALTIVEC_BUILTIN_DSS:
8702 icode = CODE_FOR_altivec_dss;
5039610b 8703 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8704 STRIP_NOPS (arg0);
84217346 8705 op0 = expand_normal (arg0);
95385cbb
AH
8706 mode0 = insn_data[icode].operand[0].mode;
8707
 8708 /* If we got invalid arguments, bail out before generating bad rtl. */
8709 if (arg0 == error_mark_node)
9a171fcd 8710 return const0_rtx;
95385cbb 8711
b44140e7
AH
8712 if (TREE_CODE (arg0) != INTEGER_CST
8713 || TREE_INT_CST_LOW (arg0) & ~0x3)
8714 {
8715 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8716 return const0_rtx;
b44140e7
AH
8717 }
8718
95385cbb
AH
8719 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8720 op0 = copy_to_mode_reg (mode0, op0);
8721
8722 emit_insn (gen_altivec_dss (op0));
0ac081f6 8723 return NULL_RTX;
7a4eca66
DE
8724
8725 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8726 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8727 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8728 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8729 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8730
8731 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8732 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8733 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8734 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8735 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8736
8737 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8738 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8739 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8740 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8741 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8742
8743 default:
8744 break;
8745 /* Fall through. */
0ac081f6 8746 }
24408032 8747
100c4561 8748 /* Expand abs* operations. */
586de218 8749 d = bdesc_abs;
ca7558fc 8750 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8751 if (d->code == fcode)
5039610b 8752 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8753
ae4b4a02 8754 /* Expand the AltiVec predicates. */
586de218 8755 dp = bdesc_altivec_preds;
ca7558fc 8756 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8757 if (dp->code == fcode)
c4ad648e 8758 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8759 exp, target);
ae4b4a02 8760
6525c0e7
AH
8761 /* LV* are funky. We initialized them differently. */
8762 switch (fcode)
8763 {
8764 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8765 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
0b61703c 8766 exp, target, false);
6525c0e7 8767 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8768 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
0b61703c 8769 exp, target, false);
6525c0e7 8770 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8771 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
0b61703c 8772 exp, target, false);
6525c0e7 8773 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8774 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
0b61703c 8775 exp, target, false);
6525c0e7 8776 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8777 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
0b61703c 8778 exp, target, false);
6525c0e7 8779 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8780 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
0b61703c 8781 exp, target, false);
6525c0e7 8782 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8783 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
0b61703c
AP
8784 exp, target, false);
8785 case ALTIVEC_BUILTIN_LVLX:
8786 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlx,
8787 exp, target, true);
8788 case ALTIVEC_BUILTIN_LVLXL:
8789 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlxl,
8790 exp, target, true);
8791 case ALTIVEC_BUILTIN_LVRX:
8792 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrx,
8793 exp, target, true);
8794 case ALTIVEC_BUILTIN_LVRXL:
8795 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrxl,
8796 exp, target, true);
6525c0e7
AH
8797 default:
8798 break;
8799 /* Fall through. */
8800 }
95385cbb 8801
92898235 8802 *expandedp = false;
0ac081f6
AH
8803 return NULL_RTX;
8804}
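/* Illustrative usage sketch, not part of the original file: the low-level
   AltiVec load/store builtins dispatched above, using the names and
   prototypes registered by altivec_init_builtins below (long offset first,
   pointer second).  Assumes -maltivec; lvx/stvx ignore the low four bits of
   the effective address, so the data should be 16-byte aligned.

     static vector signed int
     copy_quadword (const void *src, void *dst)
     {
       vector signed int v = __builtin_altivec_lvx (0, src);
       __builtin_altivec_stvx (v, 0, dst);
       return v;
     }
*/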
8805
96038623
DE
8806/* Expand the builtin in EXP and store the result in TARGET. Store
8807 true in *EXPANDEDP if we found a builtin to expand. */
8808static rtx
8809paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8810{
8811 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8812 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 8813 const struct builtin_description *d;
96038623
DE
8814 size_t i;
8815
8816 *expandedp = true;
8817
8818 switch (fcode)
8819 {
8820 case PAIRED_BUILTIN_STX:
8821 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8822 case PAIRED_BUILTIN_LX:
8823 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8824 default:
8825 break;
8826 /* Fall through. */
8827 }
8828
8829 /* Expand the paired predicates. */
23a651fc 8830 d = bdesc_paired_preds;
96038623
DE
8831 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8832 if (d->code == fcode)
8833 return paired_expand_predicate_builtin (d->icode, exp, target);
8834
8835 *expandedp = false;
8836 return NULL_RTX;
8837}
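/* Illustrative usage sketch, not part of the original file: the two paired
   single builtins registered by paired_init_builtins below.  The 2 x float
   vector type is written with the generic vector_size attribute on the
   assumption that it matches V2SF_type_node; the command-line option that
   enables TARGET_PAIRED_FLOAT is not shown in this excerpt.

     typedef float v2sf __attribute__ ((vector_size (8)));

     static v2sf
     paired_copy (const float *src, float *dst)
     {
       v2sf v = __builtin_paired_lx (0, src);
       __builtin_paired_stx (v, 0, dst);
       return v;
     }
*/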
8838
a3170dc6
AH
8839/* Binops that need to be initialized manually, but can be expanded
8840 automagically by rs6000_expand_binop_builtin. */
8841static struct builtin_description bdesc_2arg_spe[] =
8842{
8843 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8844 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8845 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8846 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8847 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8848 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8849 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8850 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8851 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8852 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8853 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8854 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8855 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8856 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8857 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8858 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8859 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8860 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8861 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8862 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8863 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8864 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8865};
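/* Illustrative usage sketch, not part of the original file: the indexed and
   immediate-offset forms of the SPE loads listed above, as seen from user
   code on an e500 target.  __ev64_opaque__ is the user-level spelling of
   opaque_V2SI_type_node that spe_init_builtins registers below; the builtin
   names follow the def_builtin calls there.

     static __ev64_opaque__
     load_two_words (__ev64_opaque__ *base, int byte_index)
     {
       __ev64_opaque__ a = __builtin_spe_evlddx (base, byte_index);
       if (byte_index == 0)
         return a;
       return __builtin_spe_evldd (base, 8);
     }

   The evlddx form takes its offset in a register, so byte_index may vary at
   run time; the evldd form encodes its offset in the instruction, so keep
   that one a small constant multiple of 8.  */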
8866
8867/* Expand the builtin in EXP and store the result in TARGET. Store
8868 true in *EXPANDEDP if we found a builtin to expand.
8869
8870 This expands the SPE builtins that are not simple unary and binary
8871 operations. */
8872static rtx
a2369ed3 8873spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8874{
5039610b 8875 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8876 tree arg1, arg0;
8877 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8878 enum insn_code icode;
8879 enum machine_mode tmode, mode0;
8880 rtx pat, op0;
8881 struct builtin_description *d;
8882 size_t i;
8883
8884 *expandedp = true;
8885
8886 /* Syntax check for a 5-bit unsigned immediate. */
8887 switch (fcode)
8888 {
8889 case SPE_BUILTIN_EVSTDD:
8890 case SPE_BUILTIN_EVSTDH:
8891 case SPE_BUILTIN_EVSTDW:
8892 case SPE_BUILTIN_EVSTWHE:
8893 case SPE_BUILTIN_EVSTWHO:
8894 case SPE_BUILTIN_EVSTWWE:
8895 case SPE_BUILTIN_EVSTWWO:
5039610b 8896 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8897 if (TREE_CODE (arg1) != INTEGER_CST
8898 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8899 {
8900 error ("argument 2 must be a 5-bit unsigned literal");
8901 return const0_rtx;
8902 }
8903 break;
8904 default:
8905 break;
8906 }
8907
00332c9f
AH
8908 /* The evsplat*i instructions are not quite generic. */
8909 switch (fcode)
8910 {
8911 case SPE_BUILTIN_EVSPLATFI:
8912 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8913 exp, target);
00332c9f
AH
8914 case SPE_BUILTIN_EVSPLATI:
8915 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8916 exp, target);
00332c9f
AH
8917 default:
8918 break;
8919 }
8920
a3170dc6
AH
8921 d = (struct builtin_description *) bdesc_2arg_spe;
8922 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8923 if (d->code == fcode)
5039610b 8924 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8925
8926 d = (struct builtin_description *) bdesc_spe_predicates;
8927 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8928 if (d->code == fcode)
5039610b 8929 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8930
8931 d = (struct builtin_description *) bdesc_spe_evsel;
8932 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8933 if (d->code == fcode)
5039610b 8934 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8935
8936 switch (fcode)
8937 {
8938 case SPE_BUILTIN_EVSTDDX:
5039610b 8939 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8940 case SPE_BUILTIN_EVSTDHX:
5039610b 8941 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8942 case SPE_BUILTIN_EVSTDWX:
5039610b 8943 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8944 case SPE_BUILTIN_EVSTWHEX:
5039610b 8945 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8946 case SPE_BUILTIN_EVSTWHOX:
5039610b 8947 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8948 case SPE_BUILTIN_EVSTWWEX:
5039610b 8949 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8950 case SPE_BUILTIN_EVSTWWOX:
5039610b 8951 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8952 case SPE_BUILTIN_EVSTDD:
5039610b 8953 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8954 case SPE_BUILTIN_EVSTDH:
5039610b 8955 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8956 case SPE_BUILTIN_EVSTDW:
5039610b 8957 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8958 case SPE_BUILTIN_EVSTWHE:
5039610b 8959 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8960 case SPE_BUILTIN_EVSTWHO:
5039610b 8961 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8962 case SPE_BUILTIN_EVSTWWE:
5039610b 8963 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8964 case SPE_BUILTIN_EVSTWWO:
5039610b 8965 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8966 case SPE_BUILTIN_MFSPEFSCR:
8967 icode = CODE_FOR_spe_mfspefscr;
8968 tmode = insn_data[icode].operand[0].mode;
8969
8970 if (target == 0
8971 || GET_MODE (target) != tmode
8972 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8973 target = gen_reg_rtx (tmode);
f676971a 8974
a3170dc6
AH
8975 pat = GEN_FCN (icode) (target);
8976 if (! pat)
8977 return 0;
8978 emit_insn (pat);
8979 return target;
8980 case SPE_BUILTIN_MTSPEFSCR:
8981 icode = CODE_FOR_spe_mtspefscr;
5039610b 8982 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8983 op0 = expand_normal (arg0);
a3170dc6
AH
8984 mode0 = insn_data[icode].operand[0].mode;
8985
8986 if (arg0 == error_mark_node)
8987 return const0_rtx;
8988
8989 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8990 op0 = copy_to_mode_reg (mode0, op0);
8991
8992 pat = GEN_FCN (icode) (op0);
8993 if (pat)
8994 emit_insn (pat);
8995 return NULL_RTX;
8996 default:
8997 break;
8998 }
8999
9000 *expandedp = false;
9001 return NULL_RTX;
9002}
9003
96038623
DE
9004static rtx
9005paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
9006{
9007 rtx pat, scratch, tmp;
9008 tree form = CALL_EXPR_ARG (exp, 0);
9009 tree arg0 = CALL_EXPR_ARG (exp, 1);
9010 tree arg1 = CALL_EXPR_ARG (exp, 2);
9011 rtx op0 = expand_normal (arg0);
9012 rtx op1 = expand_normal (arg1);
9013 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9014 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9015 int form_int;
9016 enum rtx_code code;
9017
9018 if (TREE_CODE (form) != INTEGER_CST)
9019 {
9020 error ("argument 1 of __builtin_paired_predicate must be a constant");
9021 return const0_rtx;
9022 }
9023 else
9024 form_int = TREE_INT_CST_LOW (form);
9025
9026 gcc_assert (mode0 == mode1);
9027
9028 if (arg0 == error_mark_node || arg1 == error_mark_node)
9029 return const0_rtx;
9030
9031 if (target == 0
9032 || GET_MODE (target) != SImode
9033 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
9034 target = gen_reg_rtx (SImode);
9035 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
9036 op0 = copy_to_mode_reg (mode0, op0);
9037 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
9038 op1 = copy_to_mode_reg (mode1, op1);
9039
9040 scratch = gen_reg_rtx (CCFPmode);
9041
9042 pat = GEN_FCN (icode) (scratch, op0, op1);
9043 if (!pat)
9044 return const0_rtx;
9045
9046 emit_insn (pat);
9047
9048 switch (form_int)
9049 {
9050 /* LT bit. */
9051 case 0:
9052 code = LT;
9053 break;
9054 /* GT bit. */
9055 case 1:
9056 code = GT;
9057 break;
9058 /* EQ bit. */
9059 case 2:
9060 code = EQ;
9061 break;
9062 /* UN bit. */
9063 case 3:
9064 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
9065 return target;
9066 default:
9067 error ("argument 1 of __builtin_paired_predicate is out of range");
9068 return const0_rtx;
9069 }
9070
9071 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
9072 emit_move_insn (target, tmp);
9073 return target;
9074}
9075
a3170dc6 9076static rtx
5039610b 9077spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
9078{
9079 rtx pat, scratch, tmp;
5039610b
SL
9080 tree form = CALL_EXPR_ARG (exp, 0);
9081 tree arg0 = CALL_EXPR_ARG (exp, 1);
9082 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
9083 rtx op0 = expand_normal (arg0);
9084 rtx op1 = expand_normal (arg1);
a3170dc6
AH
9085 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9086 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9087 int form_int;
9088 enum rtx_code code;
9089
9090 if (TREE_CODE (form) != INTEGER_CST)
9091 {
9092 error ("argument 1 of __builtin_spe_predicate must be a constant");
9093 return const0_rtx;
9094 }
9095 else
9096 form_int = TREE_INT_CST_LOW (form);
9097
37409796 9098 gcc_assert (mode0 == mode1);
a3170dc6
AH
9099
9100 if (arg0 == error_mark_node || arg1 == error_mark_node)
9101 return const0_rtx;
9102
9103 if (target == 0
9104 || GET_MODE (target) != SImode
9105 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
9106 target = gen_reg_rtx (SImode);
9107
9108 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9109 op0 = copy_to_mode_reg (mode0, op0);
9110 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
9111 op1 = copy_to_mode_reg (mode1, op1);
9112
9113 scratch = gen_reg_rtx (CCmode);
9114
9115 pat = GEN_FCN (icode) (scratch, op0, op1);
9116 if (! pat)
9117 return const0_rtx;
9118 emit_insn (pat);
9119
9120 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
9121 _lower_. We use one compare, but look in different bits of the
9122 CR for each variant.
9123
9124 There are 2 elements in each SPE simd type (upper/lower). The CR
9125 bits are set as follows:
9126
9127 BIT0 | BIT 1 | BIT 2 | BIT 3
9128 U | L | (U | L) | (U & L)
9129
9130 So, for an "all" relationship, BIT 3 would be set.
9131 For an "any" relationship, BIT 2 would be set. Etc.
9132
9133 Following traditional nomenclature, these bits map to:
9134
9135 BIT0 | BIT 1 | BIT 2 | BIT 3
9136 LT | GT | EQ | OV
9137
 9138 Later, we will generate rtl to look in the LT/GT/EQ/OV bits.
9139 */
9140
9141 switch (form_int)
9142 {
9143 /* All variant. OV bit. */
9144 case 0:
9145 /* We need to get to the OV bit, which is the ORDERED bit. We
9146 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 9147 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
9148 So let's just use another pattern. */
9149 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
9150 return target;
9151 /* Any variant. EQ bit. */
9152 case 1:
9153 code = EQ;
9154 break;
9155 /* Upper variant. LT bit. */
9156 case 2:
9157 code = LT;
9158 break;
9159 /* Lower variant. GT bit. */
9160 case 3:
9161 code = GT;
9162 break;
9163 default:
9164 error ("argument 1 of __builtin_spe_predicate is out of range");
9165 return const0_rtx;
9166 }
9167
9168 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
9169 emit_move_insn (target, tmp);
9170
9171 return target;
9172}
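/* Plain C reference sketch, not part of the original file: the four
   predicate variants described in the comment above, for one SPE vector
   whose per-element comparison results are U (upper) and L (lower).  This
   documents the CR-bit selection only; it is not the builtin interface.

     static int
     spe_predicate_ref (int form, int u, int l)
     {
       switch (form)
         {
         case 0: return u && l;
         case 1: return u || l;
         case 2: return u;
         case 3: return l;
         default: return 0;
         }
     }

   Form 0 is the "all" variant (BIT 3, U & L, read via the OV bit), form 1
   is "any" (BIT 2, U | L, the EQ bit), form 2 is "upper" (BIT 0, LT) and
   form 3 is "lower" (BIT 1, GT), matching the switch above.  */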
9173
9174/* The evsel builtins look like this:
9175
9176 e = __builtin_spe_evsel_OP (a, b, c, d);
9177
9178 and work like this:
9179
9180 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
9181 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
9182*/
9183
9184static rtx
5039610b 9185spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
9186{
9187 rtx pat, scratch;
5039610b
SL
9188 tree arg0 = CALL_EXPR_ARG (exp, 0);
9189 tree arg1 = CALL_EXPR_ARG (exp, 1);
9190 tree arg2 = CALL_EXPR_ARG (exp, 2);
9191 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
9192 rtx op0 = expand_normal (arg0);
9193 rtx op1 = expand_normal (arg1);
9194 rtx op2 = expand_normal (arg2);
9195 rtx op3 = expand_normal (arg3);
a3170dc6
AH
9196 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9197 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9198
37409796 9199 gcc_assert (mode0 == mode1);
a3170dc6
AH
9200
9201 if (arg0 == error_mark_node || arg1 == error_mark_node
9202 || arg2 == error_mark_node || arg3 == error_mark_node)
9203 return const0_rtx;
9204
9205 if (target == 0
9206 || GET_MODE (target) != mode0
9207 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
9208 target = gen_reg_rtx (mode0);
9209
9210 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9211 op0 = copy_to_mode_reg (mode0, op0);
9212 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
9213 op1 = copy_to_mode_reg (mode0, op1);
9214 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
9215 op2 = copy_to_mode_reg (mode0, op2);
9216 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
9217 op3 = copy_to_mode_reg (mode0, op3);
9218
9219 /* Generate the compare. */
9220 scratch = gen_reg_rtx (CCmode);
9221 pat = GEN_FCN (icode) (scratch, op0, op1);
9222 if (! pat)
9223 return const0_rtx;
9224 emit_insn (pat);
9225
9226 if (mode0 == V2SImode)
9227 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
9228 else
9229 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
9230
9231 return target;
9232}
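/* Plain C reference sketch, not part of the original file: the evsel
   semantics quoted in the comment further above, written out for a
   two-element vector with ">" standing in for OP.

     static void
     evsel_ref (const int a[2], const int b[2],
                const int c[2], const int d[2], int e[2])
     {
       e[0] = (a[0] > b[0]) ? c[0] : d[0];
       e[1] = (a[1] > b[1]) ? c[1] : d[1];
     }

   Index 0 plays the role of the upper half and index 1 the lower half.  */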
9233
0ac081f6
AH
9234/* Expand an expression EXP that calls a built-in function,
9235 with result going to TARGET if that's convenient
9236 (and in mode MODE if that's convenient).
9237 SUBTARGET may be used as the target for computing one of EXP's operands.
9238 IGNORE is nonzero if the value is to be ignored. */
9239
9240static rtx
a2369ed3 9241rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
9242 enum machine_mode mode ATTRIBUTE_UNUSED,
9243 int ignore ATTRIBUTE_UNUSED)
0ac081f6 9244{
5039610b 9245 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 9246 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 9247 const struct builtin_description *d;
92898235
AH
9248 size_t i;
9249 rtx ret;
9250 bool success;
f676971a 9251
9c78b944
DE
9252 if (fcode == RS6000_BUILTIN_RECIP)
9253 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
9254
9255 if (fcode == RS6000_BUILTIN_RECIPF)
9256 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
9257
9258 if (fcode == RS6000_BUILTIN_RSQRTF)
9259 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
9260
7ccf35ed
DN
9261 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
9262 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9263 {
9264 int icode = (int) CODE_FOR_altivec_lvsr;
9265 enum machine_mode tmode = insn_data[icode].operand[0].mode;
9266 enum machine_mode mode = insn_data[icode].operand[1].mode;
9267 tree arg;
9268 rtx op, addr, pat;
9269
37409796 9270 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 9271
5039610b 9272 arg = CALL_EXPR_ARG (exp, 0);
37409796 9273 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
9274 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
9275 addr = memory_address (mode, op);
9276 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9277 op = addr;
9278 else
9279 {
9280 /* For the load case need to negate the address. */
9281 op = gen_reg_rtx (GET_MODE (addr));
9282 emit_insn (gen_rtx_SET (VOIDmode, op,
9283 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 9284 }
7ccf35ed
DN
9285 op = gen_rtx_MEM (mode, op);
9286
9287 if (target == 0
9288 || GET_MODE (target) != tmode
9289 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9290 target = gen_reg_rtx (tmode);
9291
9292 /*pat = gen_altivec_lvsr (target, op);*/
9293 pat = GEN_FCN (icode) (target, op);
9294 if (!pat)
9295 return 0;
9296 emit_insn (pat);
9297
9298 return target;
9299 }
5039610b
SL
9300
9301 /* FIXME: There's got to be a nicer way to handle this case than
9302 constructing a new CALL_EXPR. */
f57d17f1 9303 if (fcode == ALTIVEC_BUILTIN_VCFUX
7910ae0c
DN
9304 || fcode == ALTIVEC_BUILTIN_VCFSX
9305 || fcode == ALTIVEC_BUILTIN_VCTUXS
9306 || fcode == ALTIVEC_BUILTIN_VCTSXS)
f57d17f1 9307 {
5039610b
SL
9308 if (call_expr_nargs (exp) == 1)
9309 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
9310 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 9311 }
7ccf35ed 9312
0ac081f6 9313 if (TARGET_ALTIVEC)
92898235
AH
9314 {
9315 ret = altivec_expand_builtin (exp, target, &success);
9316
a3170dc6
AH
9317 if (success)
9318 return ret;
9319 }
9320 if (TARGET_SPE)
9321 {
9322 ret = spe_expand_builtin (exp, target, &success);
9323
92898235
AH
9324 if (success)
9325 return ret;
9326 }
96038623
DE
9327 if (TARGET_PAIRED_FLOAT)
9328 {
9329 ret = paired_expand_builtin (exp, target, &success);
9330
9331 if (success)
9332 return ret;
9333 }
92898235 9334
96038623 9335 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 9336
37409796
NS
9337 /* Handle simple unary operations. */
9338 d = (struct builtin_description *) bdesc_1arg;
9339 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9340 if (d->code == fcode)
5039610b 9341 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 9342
37409796
NS
9343 /* Handle simple binary operations. */
9344 d = (struct builtin_description *) bdesc_2arg;
9345 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9346 if (d->code == fcode)
5039610b 9347 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 9348
37409796 9349 /* Handle simple ternary operations. */
586de218 9350 d = bdesc_3arg;
37409796
NS
9351 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
9352 if (d->code == fcode)
5039610b 9353 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 9354
37409796 9355 gcc_unreachable ();
0ac081f6
AH
9356}
9357
9358static void
863d938c 9359rs6000_init_builtins (void)
0ac081f6 9360{
5afaa917
NS
9361 tree tdecl;
9362
4a5eab38
PB
9363 V2SI_type_node = build_vector_type (intSI_type_node, 2);
9364 V2SF_type_node = build_vector_type (float_type_node, 2);
9365 V4HI_type_node = build_vector_type (intHI_type_node, 4);
9366 V4SI_type_node = build_vector_type (intSI_type_node, 4);
9367 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 9368 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
9369 V16QI_type_node = build_vector_type (intQI_type_node, 16);
9370
9371 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
9372 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
9373 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
9374
7c62e993
PB
9375 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
9376 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 9377 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
b6fc2cdb 9378 opaque_V4SI_type_node = build_opaque_vector_type (intSI_type_node, 4);
3fdaa45a 9379
8bb418a3
ZL
9380 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
9381 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
9382 'vector unsigned short'. */
9383
8dd16ecc
NS
9384 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
9385 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
9386 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
9387 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 9388
58646b77
PB
9389 long_integer_type_internal_node = long_integer_type_node;
9390 long_unsigned_type_internal_node = long_unsigned_type_node;
9391 intQI_type_internal_node = intQI_type_node;
9392 uintQI_type_internal_node = unsigned_intQI_type_node;
9393 intHI_type_internal_node = intHI_type_node;
9394 uintHI_type_internal_node = unsigned_intHI_type_node;
9395 intSI_type_internal_node = intSI_type_node;
9396 uintSI_type_internal_node = unsigned_intSI_type_node;
9397 float_type_internal_node = float_type_node;
9398 void_type_internal_node = void_type_node;
9399
5afaa917
NS
9400 tdecl = build_decl (TYPE_DECL, get_identifier ("__bool char"),
9401 bool_char_type_node);
9402 TYPE_NAME (bool_char_type_node) = tdecl;
9403 (*lang_hooks.decls.pushdecl) (tdecl);
9404 tdecl = build_decl (TYPE_DECL, get_identifier ("__bool short"),
9405 bool_short_type_node);
9406 TYPE_NAME (bool_short_type_node) = tdecl;
9407 (*lang_hooks.decls.pushdecl) (tdecl);
9408 tdecl = build_decl (TYPE_DECL, get_identifier ("__bool int"),
9409 bool_int_type_node);
9410 TYPE_NAME (bool_int_type_node) = tdecl;
9411 (*lang_hooks.decls.pushdecl) (tdecl);
9412 tdecl = build_decl (TYPE_DECL, get_identifier ("__pixel"),
9413 pixel_type_node);
9414 TYPE_NAME (pixel_type_node) = tdecl;
9415 (*lang_hooks.decls.pushdecl) (tdecl);
8bb418a3 9416
4a5eab38
PB
9417 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
9418 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
9419 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
9420 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3 9421
5afaa917
NS
9422 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector unsigned char"),
9423 unsigned_V16QI_type_node);
9424 TYPE_NAME (unsigned_V16QI_type_node) = tdecl;
9425 (*lang_hooks.decls.pushdecl) (tdecl);
9426 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector signed char"),
9427 V16QI_type_node);
9428 TYPE_NAME (V16QI_type_node) = tdecl;
9429 (*lang_hooks.decls.pushdecl) (tdecl);
9430 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector __bool char"),
9431 bool_V16QI_type_node);
9432 TYPE_NAME ( bool_V16QI_type_node) = tdecl;
9433 (*lang_hooks.decls.pushdecl) (tdecl);
9434
9435 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector unsigned short"),
9436 unsigned_V8HI_type_node);
9437 TYPE_NAME (unsigned_V8HI_type_node) = tdecl;
9438 (*lang_hooks.decls.pushdecl) (tdecl);
9439 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector signed short"),
9440 V8HI_type_node);
9441 TYPE_NAME (V8HI_type_node) = tdecl;
9442 (*lang_hooks.decls.pushdecl) (tdecl);
9443 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector __bool short"),
9444 bool_V8HI_type_node);
9445 TYPE_NAME (bool_V8HI_type_node) = tdecl;
9446 (*lang_hooks.decls.pushdecl) (tdecl);
9447
9448 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector unsigned int"),
9449 unsigned_V4SI_type_node);
9450 TYPE_NAME (unsigned_V4SI_type_node) = tdecl;
9451 (*lang_hooks.decls.pushdecl) (tdecl);
9452 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector signed int"),
9453 V4SI_type_node);
9454 TYPE_NAME (V4SI_type_node) = tdecl;
9455 (*lang_hooks.decls.pushdecl) (tdecl);
9456 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector __bool int"),
9457 bool_V4SI_type_node);
9458 TYPE_NAME (bool_V4SI_type_node) = tdecl;
9459 (*lang_hooks.decls.pushdecl) (tdecl);
9460
9461 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector float"),
9462 V4SF_type_node);
9463 TYPE_NAME (V4SF_type_node) = tdecl;
9464 (*lang_hooks.decls.pushdecl) (tdecl);
9465 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector __pixel"),
9466 pixel_V8HI_type_node);
9467 TYPE_NAME (pixel_V8HI_type_node) = tdecl;
9468 (*lang_hooks.decls.pushdecl) (tdecl);
8bb418a3 9469
96038623
DE
9470 if (TARGET_PAIRED_FLOAT)
9471 paired_init_builtins ();
a3170dc6 9472 if (TARGET_SPE)
3fdaa45a 9473 spe_init_builtins ();
0ac081f6
AH
9474 if (TARGET_ALTIVEC)
9475 altivec_init_builtins ();
96038623 9476 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 9477 rs6000_common_init_builtins ();
9c78b944
DE
9478 if (TARGET_PPC_GFXOPT)
9479 {
9480 tree ftype = build_function_type_list (float_type_node,
9481 float_type_node,
9482 float_type_node,
9483 NULL_TREE);
9484 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9485 RS6000_BUILTIN_RECIPF);
9486
9487 ftype = build_function_type_list (float_type_node,
9488 float_type_node,
9489 NULL_TREE);
9490 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9491 RS6000_BUILTIN_RSQRTF);
9492 }
9493 if (TARGET_POPCNTB)
9494 {
9495 tree ftype = build_function_type_list (double_type_node,
9496 double_type_node,
9497 double_type_node,
9498 NULL_TREE);
9499 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9500 RS6000_BUILTIN_RECIP);
9501
9502 }
69ca3549
DE
9503
9504#if TARGET_XCOFF
9505 /* AIX libm provides clog as __clog. */
9506 if (built_in_decls [BUILT_IN_CLOG])
9507 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9508#endif
fb220235
FXC
9509
9510#ifdef SUBTARGET_INIT_BUILTINS
9511 SUBTARGET_INIT_BUILTINS;
9512#endif
0ac081f6
AH
9513}
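/* Illustrative usage sketch, not part of the original file: the reciprocal
   builtins registered just above.  They are only defined when the
   corresponding mask bits (MASK_PPC_GFXOPT, MASK_POPCNTB) are enabled by
   the selected -mcpu, and they expand to the recipsf3/recipdf3/rsqrtsf2
   patterns referenced in rs6000_expand_builtin.

     static float  approx_divf   (float x, float y)   { return __builtin_recipdivf (x, y); }
     static double approx_div    (double x, double y) { return __builtin_recipdiv (x, y); }
     static float  approx_rsqrtf (float x)            { return __builtin_rsqrtf (x); }
*/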
9514
a3170dc6
AH
9515/* Search through a set of builtins and enable the mask bits.
9516 DESC is an array of builtins.
b6d08ca1 9517 SIZE is the total number of builtins.
a3170dc6
AH
9518 START is the builtin enum at which to start.
9519 END is the builtin enum at which to end. */
0ac081f6 9520static void
a2369ed3 9521enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 9522 enum rs6000_builtins start,
a2369ed3 9523 enum rs6000_builtins end)
a3170dc6
AH
9524{
9525 int i;
9526
9527 for (i = 0; i < size; ++i)
9528 if (desc[i].code == start)
9529 break;
9530
9531 if (i == size)
9532 return;
9533
9534 for (; i < size; ++i)
9535 {
9536 /* Flip all the bits on. */
9537 desc[i].mask = target_flags;
9538 if (desc[i].code == end)
9539 break;
9540 }
9541}
9542
9543static void
863d938c 9544spe_init_builtins (void)
0ac081f6 9545{
a3170dc6
AH
9546 tree endlink = void_list_node;
9547 tree puint_type_node = build_pointer_type (unsigned_type_node);
9548 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9549 struct builtin_description *d;
0ac081f6
AH
9550 size_t i;
9551
a3170dc6
AH
9552 tree v2si_ftype_4_v2si
9553 = build_function_type
3fdaa45a
AH
9554 (opaque_V2SI_type_node,
9555 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9556 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9557 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9558 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9559 endlink)))));
9560
9561 tree v2sf_ftype_4_v2sf
9562 = build_function_type
3fdaa45a
AH
9563 (opaque_V2SF_type_node,
9564 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9565 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9566 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9567 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9568 endlink)))));
9569
9570 tree int_ftype_int_v2si_v2si
9571 = build_function_type
9572 (integer_type_node,
9573 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9574 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9575 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9576 endlink))));
9577
9578 tree int_ftype_int_v2sf_v2sf
9579 = build_function_type
9580 (integer_type_node,
9581 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9582 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9583 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9584 endlink))));
9585
9586 tree void_ftype_v2si_puint_int
9587 = build_function_type (void_type_node,
3fdaa45a 9588 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9589 tree_cons (NULL_TREE, puint_type_node,
9590 tree_cons (NULL_TREE,
9591 integer_type_node,
9592 endlink))));
9593
9594 tree void_ftype_v2si_puint_char
9595 = build_function_type (void_type_node,
3fdaa45a 9596 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9597 tree_cons (NULL_TREE, puint_type_node,
9598 tree_cons (NULL_TREE,
9599 char_type_node,
9600 endlink))));
9601
9602 tree void_ftype_v2si_pv2si_int
9603 = build_function_type (void_type_node,
3fdaa45a 9604 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9605 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9606 tree_cons (NULL_TREE,
9607 integer_type_node,
9608 endlink))));
9609
9610 tree void_ftype_v2si_pv2si_char
9611 = build_function_type (void_type_node,
3fdaa45a 9612 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9613 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9614 tree_cons (NULL_TREE,
9615 char_type_node,
9616 endlink))));
9617
9618 tree void_ftype_int
9619 = build_function_type (void_type_node,
9620 tree_cons (NULL_TREE, integer_type_node, endlink));
9621
9622 tree int_ftype_void
36e8d515 9623 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
9624
9625 tree v2si_ftype_pv2si_int
3fdaa45a 9626 = build_function_type (opaque_V2SI_type_node,
6035d635 9627 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9628 tree_cons (NULL_TREE, integer_type_node,
9629 endlink)));
9630
9631 tree v2si_ftype_puint_int
3fdaa45a 9632 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9633 tree_cons (NULL_TREE, puint_type_node,
9634 tree_cons (NULL_TREE, integer_type_node,
9635 endlink)));
9636
9637 tree v2si_ftype_pushort_int
3fdaa45a 9638 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9639 tree_cons (NULL_TREE, pushort_type_node,
9640 tree_cons (NULL_TREE, integer_type_node,
9641 endlink)));
9642
00332c9f
AH
9643 tree v2si_ftype_signed_char
9644 = build_function_type (opaque_V2SI_type_node,
9645 tree_cons (NULL_TREE, signed_char_type_node,
9646 endlink));
9647
a3170dc6
AH
9648 /* The initialization of the simple binary and unary builtins is
9649 done in rs6000_common_init_builtins, but we have to enable the
9650 mask bits here manually because we have run out of `target_flags'
9651 bits. We really need to redesign this mask business. */
9652
9653 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9654 ARRAY_SIZE (bdesc_2arg),
9655 SPE_BUILTIN_EVADDW,
9656 SPE_BUILTIN_EVXOR);
9657 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9658 ARRAY_SIZE (bdesc_1arg),
9659 SPE_BUILTIN_EVABS,
9660 SPE_BUILTIN_EVSUBFUSIAAW);
9661 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9662 ARRAY_SIZE (bdesc_spe_predicates),
9663 SPE_BUILTIN_EVCMPEQ,
9664 SPE_BUILTIN_EVFSTSTLT);
9665 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9666 ARRAY_SIZE (bdesc_spe_evsel),
9667 SPE_BUILTIN_EVSEL_CMPGTS,
9668 SPE_BUILTIN_EVSEL_FSTSTEQ);
9669
36252949
AH
9670 (*lang_hooks.decls.pushdecl)
9671 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9672 opaque_V2SI_type_node));
9673
a3170dc6 9674 /* Initialize irregular SPE builtins. */
f676971a 9675
a3170dc6
AH
9676 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9677 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9678 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9679 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9680 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9681 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9682 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9683 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9684 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9685 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9686 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9687 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9688 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9689 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9690 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9691 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
9692 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9693 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
9694
9695 /* Loads. */
9696 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9697 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9698 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9699 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9700 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9701 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9702 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9703 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9704 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9705 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9706 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9707 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9708 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9709 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9710 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9711 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9712 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9713 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9714 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9715 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9716 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9717 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9718
9719 /* Predicates. */
9720 d = (struct builtin_description *) bdesc_spe_predicates;
9721 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9722 {
9723 tree type;
9724
9725 switch (insn_data[d->icode].operand[1].mode)
9726 {
9727 case V2SImode:
9728 type = int_ftype_int_v2si_v2si;
9729 break;
9730 case V2SFmode:
9731 type = int_ftype_int_v2sf_v2sf;
9732 break;
9733 default:
37409796 9734 gcc_unreachable ();
a3170dc6
AH
9735 }
9736
9737 def_builtin (d->mask, d->name, type, d->code);
9738 }
9739
9740 /* Evsel predicates. */
9741 d = (struct builtin_description *) bdesc_spe_evsel;
9742 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9743 {
9744 tree type;
9745
9746 switch (insn_data[d->icode].operand[1].mode)
9747 {
9748 case V2SImode:
9749 type = v2si_ftype_4_v2si;
9750 break;
9751 case V2SFmode:
9752 type = v2sf_ftype_4_v2sf;
9753 break;
9754 default:
37409796 9755 gcc_unreachable ();
a3170dc6
AH
9756 }
9757
9758 def_builtin (d->mask, d->name, type, d->code);
9759 }
9760}
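/* Illustrative usage sketch, not part of the original file: the two
   irregular SPEFSCR builtins registered above (int in, int out), on an SPE
   (e500) target.

     static int
     swap_spefscr (int new_value)
     {
       int old = __builtin_spe_mfspefscr ();
       __builtin_spe_mtspefscr (new_value);
       return old;
     }
*/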
9761
96038623
DE
9762static void
9763paired_init_builtins (void)
9764{
23a651fc 9765 const struct builtin_description *d;
96038623
DE
9766 size_t i;
9767 tree endlink = void_list_node;
9768
9769 tree int_ftype_int_v2sf_v2sf
9770 = build_function_type
9771 (integer_type_node,
9772 tree_cons (NULL_TREE, integer_type_node,
9773 tree_cons (NULL_TREE, V2SF_type_node,
9774 tree_cons (NULL_TREE, V2SF_type_node,
9775 endlink))));
9776 tree pcfloat_type_node =
9777 build_pointer_type (build_qualified_type
9778 (float_type_node, TYPE_QUAL_CONST));
9779
9780 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9781 long_integer_type_node,
9782 pcfloat_type_node,
9783 NULL_TREE);
9784 tree void_ftype_v2sf_long_pcfloat =
9785 build_function_type_list (void_type_node,
9786 V2SF_type_node,
9787 long_integer_type_node,
9788 pcfloat_type_node,
9789 NULL_TREE);
9790
9791
9792 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9793 PAIRED_BUILTIN_LX);
9794
9795
9796 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9797 PAIRED_BUILTIN_STX);
9798
9799 /* Predicates. */
23a651fc 9800 d = bdesc_paired_preds;
96038623
DE
9801 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9802 {
9803 tree type;
9804
9805 switch (insn_data[d->icode].operand[1].mode)
9806 {
9807 case V2SFmode:
9808 type = int_ftype_int_v2sf_v2sf;
9809 break;
9810 default:
9811 gcc_unreachable ();
9812 }
9813
9814 def_builtin (d->mask, d->name, type, d->code);
9815 }
9816}
9817
a3170dc6 9818static void
863d938c 9819altivec_init_builtins (void)
a3170dc6 9820{
586de218
KG
9821 const struct builtin_description *d;
9822 const struct builtin_description_predicates *dp;
a3170dc6 9823 size_t i;
7a4eca66
DE
9824 tree ftype;
9825
a3170dc6
AH
9826 tree pfloat_type_node = build_pointer_type (float_type_node);
9827 tree pint_type_node = build_pointer_type (integer_type_node);
9828 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9829 tree pchar_type_node = build_pointer_type (char_type_node);
9830
9831 tree pvoid_type_node = build_pointer_type (void_type_node);
9832
0dbc3651
ZW
9833 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9834 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9835 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9836 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9837
9838 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9839
58646b77
PB
9840 tree int_ftype_opaque
9841 = build_function_type_list (integer_type_node,
9842 opaque_V4SI_type_node, NULL_TREE);
266b4890
AP
9843 tree opaque_ftype_opaque
9844 = build_function_type (integer_type_node,
9845 NULL_TREE);
58646b77
PB
9846 tree opaque_ftype_opaque_int
9847 = build_function_type_list (opaque_V4SI_type_node,
9848 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9849 tree opaque_ftype_opaque_opaque_int
9850 = build_function_type_list (opaque_V4SI_type_node,
9851 opaque_V4SI_type_node, opaque_V4SI_type_node,
9852 integer_type_node, NULL_TREE);
9853 tree int_ftype_int_opaque_opaque
9854 = build_function_type_list (integer_type_node,
9855 integer_type_node, opaque_V4SI_type_node,
9856 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
9857 tree int_ftype_int_v4si_v4si
9858 = build_function_type_list (integer_type_node,
9859 integer_type_node, V4SI_type_node,
9860 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9861 tree v4sf_ftype_pcfloat
9862 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9863 tree void_ftype_pfloat_v4sf
b4de2f7d 9864 = build_function_type_list (void_type_node,
a3170dc6 9865 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
9866 tree v4si_ftype_pcint
9867 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9868 tree void_ftype_pint_v4si
b4de2f7d
AH
9869 = build_function_type_list (void_type_node,
9870 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9871 tree v8hi_ftype_pcshort
9872 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9873 tree void_ftype_pshort_v8hi
b4de2f7d
AH
9874 = build_function_type_list (void_type_node,
9875 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
9876 tree v16qi_ftype_pcchar
9877 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9878 tree void_ftype_pchar_v16qi
b4de2f7d
AH
9879 = build_function_type_list (void_type_node,
9880 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9881 tree void_ftype_v4si
b4de2f7d 9882 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9883 tree v8hi_ftype_void
9884 = build_function_type (V8HI_type_node, void_list_node);
9885 tree void_ftype_void
9886 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
9887 tree void_ftype_int
9888 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9889
58646b77
PB
9890 tree opaque_ftype_long_pcvoid
9891 = build_function_type_list (opaque_V4SI_type_node,
9892 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9893 tree v16qi_ftype_long_pcvoid
a3170dc6 9894 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
9895 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9896 tree v8hi_ftype_long_pcvoid
a3170dc6 9897 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
9898 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9899 tree v4si_ftype_long_pcvoid
a3170dc6 9900 = build_function_type_list (V4SI_type_node,
b4a62fa0 9901 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9902
58646b77
PB
9903 tree void_ftype_opaque_long_pvoid
9904 = build_function_type_list (void_type_node,
9905 opaque_V4SI_type_node, long_integer_type_node,
9906 pvoid_type_node, NULL_TREE);
b4a62fa0 9907 tree void_ftype_v4si_long_pvoid
b4de2f7d 9908 = build_function_type_list (void_type_node,
b4a62fa0 9909 V4SI_type_node, long_integer_type_node,
b4de2f7d 9910 pvoid_type_node, NULL_TREE);
b4a62fa0 9911 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9912 = build_function_type_list (void_type_node,
b4a62fa0 9913 V16QI_type_node, long_integer_type_node,
b4de2f7d 9914 pvoid_type_node, NULL_TREE);
b4a62fa0 9915 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9916 = build_function_type_list (void_type_node,
b4a62fa0 9917 V8HI_type_node, long_integer_type_node,
b4de2f7d 9918 pvoid_type_node, NULL_TREE);
a3170dc6
AH
9919 tree int_ftype_int_v8hi_v8hi
9920 = build_function_type_list (integer_type_node,
9921 integer_type_node, V8HI_type_node,
9922 V8HI_type_node, NULL_TREE);
9923 tree int_ftype_int_v16qi_v16qi
9924 = build_function_type_list (integer_type_node,
9925 integer_type_node, V16QI_type_node,
9926 V16QI_type_node, NULL_TREE);
9927 tree int_ftype_int_v4sf_v4sf
9928 = build_function_type_list (integer_type_node,
9929 integer_type_node, V4SF_type_node,
9930 V4SF_type_node, NULL_TREE);
9931 tree v4si_ftype_v4si
9932 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9933 tree v8hi_ftype_v8hi
9934 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9935 tree v16qi_ftype_v16qi
9936 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9937 tree v4sf_ftype_v4sf
9938 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9939 tree void_ftype_pcvoid_int_int
a3170dc6 9940 = build_function_type_list (void_type_node,
0dbc3651 9941 pcvoid_type_node, integer_type_node,
8bb418a3 9942 integer_type_node, NULL_TREE);
8bb418a3 9943
0dbc3651
ZW
9944 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9945 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9946 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9947 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9948 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9949 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9950 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9951 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9952 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9953 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9954 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9955 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9956 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9957 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9958 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9959 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
9960 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9961 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9962 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9963 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
9964 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9965 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9966 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9967 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9968 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9969 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9970 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9971 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9972 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9973 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9974 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9975 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
9976 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9977 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9978 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9979 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9980 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9981 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9982 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9983 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9984 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9985 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9986 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9987 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9988 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9989 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9990
0b61703c
AP
9991 if (rs6000_cpu == PROCESSOR_CELL)
9992 {
9993 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLX);
9994 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLXL);
9995 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvrx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVRX);
9996 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvrxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVRXL);
9997
9998 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvlx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVLX);
9999 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvlxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVLXL);
10000 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvrx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVRX);
10001 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvrxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVRXL);
10002
10003 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvlx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVLX);
10004 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvlxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVLXL);
10005 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvrx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVRX);
10006 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvrxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVRXL);
10007
10008 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvlx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVLX);
10009 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvlxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVLXL);
10010 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvrx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVRX);
10011 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvrxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVRXL);
10012 }
58646b77 10013 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
266b4890
AP
10014 def_builtin (MASK_ALTIVEC, "__builtin_vec_splats", opaque_ftype_opaque, ALTIVEC_BUILTIN_VEC_SPLATS);
10015 def_builtin (MASK_ALTIVEC, "__builtin_vec_promote", opaque_ftype_opaque, ALTIVEC_BUILTIN_VEC_PROMOTE);
58646b77
PB
10016
10017 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
10018 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
266b4890
AP
10019 def_builtin (MASK_ALTIVEC, "__builtin_vec_extract", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_EXTRACT);
10020 def_builtin (MASK_ALTIVEC, "__builtin_vec_insert", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_INSERT);
58646b77
PB
10021 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
10022 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
10023 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
10024 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
10025 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
10026 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
10027 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
10028 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 10029
a3170dc6 10030 /* Add the DST variants. */
586de218 10031 d = bdesc_dst;
a3170dc6 10032 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 10033 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
10034
10035 /* Initialize the predicates. */
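 /* Note that each predicate is given a prototype with a leading integer
    argument ahead of the two vector operands (int_ftype_int_v4si_v4si and
    friends, selected below); presumably that extra operand picks which
    CR6 condition the caller wants tested, as with the vec_all_* and
    vec_any_* interfaces.  */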
586de218 10036 dp = bdesc_altivec_preds;
a3170dc6
AH
10037 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
10038 {
10039 enum machine_mode mode1;
10040 tree type;
58646b77
PB
10041 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10042 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 10043
58646b77
PB
10044 if (is_overloaded)
10045 mode1 = VOIDmode;
10046 else
10047 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
10048
10049 switch (mode1)
10050 {
58646b77
PB
10051 case VOIDmode:
10052 type = int_ftype_int_opaque_opaque;
10053 break;
a3170dc6
AH
10054 case V4SImode:
10055 type = int_ftype_int_v4si_v4si;
10056 break;
10057 case V8HImode:
10058 type = int_ftype_int_v8hi_v8hi;
10059 break;
10060 case V16QImode:
10061 type = int_ftype_int_v16qi_v16qi;
10062 break;
10063 case V4SFmode:
10064 type = int_ftype_int_v4sf_v4sf;
10065 break;
10066 default:
37409796 10067 gcc_unreachable ();
a3170dc6 10068 }
f676971a 10069
a3170dc6
AH
10070 def_builtin (dp->mask, dp->name, type, dp->code);
10071 }
10072
10073 /* Initialize the abs* operators. */
586de218 10074 d = bdesc_abs;
a3170dc6
AH
10075 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
10076 {
10077 enum machine_mode mode0;
10078 tree type;
10079
10080 mode0 = insn_data[d->icode].operand[0].mode;
10081
10082 switch (mode0)
10083 {
10084 case V4SImode:
10085 type = v4si_ftype_v4si;
10086 break;
10087 case V8HImode:
10088 type = v8hi_ftype_v8hi;
10089 break;
10090 case V16QImode:
10091 type = v16qi_ftype_v16qi;
10092 break;
10093 case V4SFmode:
10094 type = v4sf_ftype_v4sf;
10095 break;
10096 default:
37409796 10097 gcc_unreachable ();
a3170dc6 10098 }
f676971a 10099
a3170dc6
AH
10100 def_builtin (d->mask, d->name, type, d->code);
10101 }
7ccf35ed 10102
13c62176
DN
10103 if (TARGET_ALTIVEC)
10104 {
10105 tree decl;
10106
10107 /* Initialize target builtin that implements
10108 targetm.vectorize.builtin_mask_for_load. */
10109
c79efc4d
RÁE
10110 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
10111 v16qi_ftype_long_pcvoid,
10112 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
10113 BUILT_IN_MD, NULL, NULL_TREE);
10114 TREE_READONLY (decl) = 1;
13c62176
DN
10115 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
10116 altivec_builtin_mask_for_load = decl;
13c62176 10117 }
7a4eca66
DE
10118
10119 /* Access to the vec_init patterns. */
10120 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
10121 integer_type_node, integer_type_node,
10122 integer_type_node, NULL_TREE);
10123 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
10124 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
10125
10126 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
10127 short_integer_type_node,
10128 short_integer_type_node,
10129 short_integer_type_node,
10130 short_integer_type_node,
10131 short_integer_type_node,
10132 short_integer_type_node,
10133 short_integer_type_node, NULL_TREE);
10134 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
10135 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
10136
10137 ftype = build_function_type_list (V16QI_type_node, char_type_node,
10138 char_type_node, char_type_node,
10139 char_type_node, char_type_node,
10140 char_type_node, char_type_node,
10141 char_type_node, char_type_node,
10142 char_type_node, char_type_node,
10143 char_type_node, char_type_node,
10144 char_type_node, char_type_node,
10145 char_type_node, NULL_TREE);
10146 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
10147 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
10148
10149 ftype = build_function_type_list (V4SF_type_node, float_type_node,
10150 float_type_node, float_type_node,
10151 float_type_node, NULL_TREE);
10152 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
10153 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
10154
10155 /* Access to the vec_set patterns. */
10156 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
10157 intSI_type_node,
10158 integer_type_node, NULL_TREE);
10159 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
10160 ALTIVEC_BUILTIN_VEC_SET_V4SI);
10161
10162 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
10163 intHI_type_node,
10164 integer_type_node, NULL_TREE);
10165 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
10166 ALTIVEC_BUILTIN_VEC_SET_V8HI);
10167
10168 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
10169 intQI_type_node,
10170 integer_type_node, NULL_TREE);
10171 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
10172 ALTIVEC_BUILTIN_VEC_SET_V16QI);
10173
10174 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
10175 float_type_node,
10176 integer_type_node, NULL_TREE);
10177 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
10178 ALTIVEC_BUILTIN_VEC_SET_V4SF);
10179
10180 /* Access to the vec_extract patterns. */
10181 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
10182 integer_type_node, NULL_TREE);
10183 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
10184 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
10185
10186 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
10187 integer_type_node, NULL_TREE);
10188 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
10189 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
10190
10191 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
10192 integer_type_node, NULL_TREE);
10193 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
10194 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
10195
10196 ftype = build_function_type_list (float_type_node, V4SF_type_node,
10197 integer_type_node, NULL_TREE);
10198 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
10199 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
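 /* Illustrative sketch only: given the prototypes built above, code can
    construct and inspect vectors directly, e.g.

      vector int v = __builtin_vec_init_v4si (1, 2, 3, 4);
      v = __builtin_vec_set_v4si (v, 99, 0);
      int x = __builtin_vec_ext_v4si (v, 3);

    with the actual expansion going through the vec_init, vec_set and
    vec_extract patterns these builtins are wired to.  */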
a3170dc6
AH
10200}
10201
10202static void
863d938c 10203rs6000_common_init_builtins (void)
a3170dc6 10204{
586de218 10205 const struct builtin_description *d;
a3170dc6
AH
10206 size_t i;
10207
96038623
DE
10208 tree v2sf_ftype_v2sf_v2sf_v2sf
10209 = build_function_type_list (V2SF_type_node,
10210 V2SF_type_node, V2SF_type_node,
10211 V2SF_type_node, NULL_TREE);
10212
a3170dc6
AH
10213 tree v4sf_ftype_v4sf_v4sf_v16qi
10214 = build_function_type_list (V4SF_type_node,
10215 V4SF_type_node, V4SF_type_node,
10216 V16QI_type_node, NULL_TREE);
10217 tree v4si_ftype_v4si_v4si_v16qi
10218 = build_function_type_list (V4SI_type_node,
10219 V4SI_type_node, V4SI_type_node,
10220 V16QI_type_node, NULL_TREE);
10221 tree v8hi_ftype_v8hi_v8hi_v16qi
10222 = build_function_type_list (V8HI_type_node,
10223 V8HI_type_node, V8HI_type_node,
10224 V16QI_type_node, NULL_TREE);
10225 tree v16qi_ftype_v16qi_v16qi_v16qi
10226 = build_function_type_list (V16QI_type_node,
10227 V16QI_type_node, V16QI_type_node,
10228 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
10229 tree v4si_ftype_int
10230 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
10231 tree v8hi_ftype_int
10232 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
10233 tree v16qi_ftype_int
10234 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
10235 tree v8hi_ftype_v16qi
10236 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
10237 tree v4sf_ftype_v4sf
10238 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
10239
10240 tree v2si_ftype_v2si_v2si
2abe3e28
AH
10241 = build_function_type_list (opaque_V2SI_type_node,
10242 opaque_V2SI_type_node,
10243 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10244
96038623 10245 tree v2sf_ftype_v2sf_v2sf_spe
2abe3e28
AH
10246 = build_function_type_list (opaque_V2SF_type_node,
10247 opaque_V2SF_type_node,
10248 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 10249
96038623
DE
10250 tree v2sf_ftype_v2sf_v2sf
10251 = build_function_type_list (V2SF_type_node,
10252 V2SF_type_node,
10253 V2SF_type_node, NULL_TREE);
10254
10255
a3170dc6 10256 tree v2si_ftype_int_int
2abe3e28 10257 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
10258 integer_type_node, integer_type_node,
10259 NULL_TREE);
10260
58646b77
PB
10261 tree opaque_ftype_opaque
10262 = build_function_type_list (opaque_V4SI_type_node,
10263 opaque_V4SI_type_node, NULL_TREE);
10264
a3170dc6 10265 tree v2si_ftype_v2si
2abe3e28
AH
10266 = build_function_type_list (opaque_V2SI_type_node,
10267 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10268
96038623 10269 tree v2sf_ftype_v2sf_spe
2abe3e28
AH
10270 = build_function_type_list (opaque_V2SF_type_node,
10271 opaque_V2SF_type_node, NULL_TREE);
f676971a 10272
96038623
DE
10273 tree v2sf_ftype_v2sf
10274 = build_function_type_list (V2SF_type_node,
10275 V2SF_type_node, NULL_TREE);
10276
a3170dc6 10277 tree v2sf_ftype_v2si
2abe3e28
AH
10278 = build_function_type_list (opaque_V2SF_type_node,
10279 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
10280
10281 tree v2si_ftype_v2sf
2abe3e28
AH
10282 = build_function_type_list (opaque_V2SI_type_node,
10283 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
10284
10285 tree v2si_ftype_v2si_char
2abe3e28
AH
10286 = build_function_type_list (opaque_V2SI_type_node,
10287 opaque_V2SI_type_node,
10288 char_type_node, NULL_TREE);
a3170dc6
AH
10289
10290 tree v2si_ftype_int_char
2abe3e28 10291 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
10292 integer_type_node, char_type_node, NULL_TREE);
10293
10294 tree v2si_ftype_char
2abe3e28
AH
10295 = build_function_type_list (opaque_V2SI_type_node,
10296 char_type_node, NULL_TREE);
a3170dc6
AH
10297
10298 tree int_ftype_int_int
10299 = build_function_type_list (integer_type_node,
10300 integer_type_node, integer_type_node,
10301 NULL_TREE);
95385cbb 10302
58646b77
PB
10303 tree opaque_ftype_opaque_opaque
10304 = build_function_type_list (opaque_V4SI_type_node,
10305 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 10306 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
10307 = build_function_type_list (V4SI_type_node,
10308 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 10309 tree v4sf_ftype_v4si_int
b4de2f7d 10310 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
10311 V4SI_type_node, integer_type_node, NULL_TREE);
10312 tree v4si_ftype_v4sf_int
b4de2f7d 10313 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10314 V4SF_type_node, integer_type_node, NULL_TREE);
10315 tree v4si_ftype_v4si_int
b4de2f7d 10316 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10317 V4SI_type_node, integer_type_node, NULL_TREE);
10318 tree v8hi_ftype_v8hi_int
b4de2f7d 10319 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
10320 V8HI_type_node, integer_type_node, NULL_TREE);
10321 tree v16qi_ftype_v16qi_int
b4de2f7d 10322 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
10323 V16QI_type_node, integer_type_node, NULL_TREE);
10324 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
10325 = build_function_type_list (V16QI_type_node,
10326 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
10327 integer_type_node, NULL_TREE);
10328 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
10329 = build_function_type_list (V8HI_type_node,
10330 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
10331 integer_type_node, NULL_TREE);
10332 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
10333 = build_function_type_list (V4SI_type_node,
10334 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
10335 integer_type_node, NULL_TREE);
10336 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
10337 = build_function_type_list (V4SF_type_node,
10338 V4SF_type_node, V4SF_type_node,
b9e4e5d1 10339 integer_type_node, NULL_TREE);
0ac081f6 10340 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
10341 = build_function_type_list (V4SF_type_node,
10342 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
10343 tree opaque_ftype_opaque_opaque_opaque
10344 = build_function_type_list (opaque_V4SI_type_node,
10345 opaque_V4SI_type_node, opaque_V4SI_type_node,
10346 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 10347 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
10348 = build_function_type_list (V4SF_type_node,
10349 V4SF_type_node, V4SF_type_node,
10350 V4SI_type_node, NULL_TREE);
2212663f 10351 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
10352 = build_function_type_list (V4SF_type_node,
10353 V4SF_type_node, V4SF_type_node,
10354 V4SF_type_node, NULL_TREE);
f676971a 10355 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
10356 = build_function_type_list (V4SI_type_node,
10357 V4SI_type_node, V4SI_type_node,
10358 V4SI_type_node, NULL_TREE);
0ac081f6 10359 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
10360 = build_function_type_list (V8HI_type_node,
10361 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 10362 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
10363 = build_function_type_list (V8HI_type_node,
10364 V8HI_type_node, V8HI_type_node,
10365 V8HI_type_node, NULL_TREE);
c4ad648e 10366 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
10367 = build_function_type_list (V4SI_type_node,
10368 V8HI_type_node, V8HI_type_node,
10369 V4SI_type_node, NULL_TREE);
c4ad648e 10370 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
10371 = build_function_type_list (V4SI_type_node,
10372 V16QI_type_node, V16QI_type_node,
10373 V4SI_type_node, NULL_TREE);
0ac081f6 10374 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
10375 = build_function_type_list (V16QI_type_node,
10376 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10377 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
10378 = build_function_type_list (V4SI_type_node,
10379 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 10380 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
10381 = build_function_type_list (V8HI_type_node,
10382 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10383 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
10384 = build_function_type_list (V4SI_type_node,
10385 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10386 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
10387 = build_function_type_list (V8HI_type_node,
10388 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 10389 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
10390 = build_function_type_list (V16QI_type_node,
10391 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10392 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
10393 = build_function_type_list (V4SI_type_node,
10394 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 10395 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
10396 = build_function_type_list (V4SI_type_node,
10397 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10398 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
10399 = build_function_type_list (V4SI_type_node,
10400 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
10401 tree v4si_ftype_v8hi
10402 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
10403 tree int_ftype_v4si_v4si
10404 = build_function_type_list (integer_type_node,
10405 V4SI_type_node, V4SI_type_node, NULL_TREE);
10406 tree int_ftype_v4sf_v4sf
10407 = build_function_type_list (integer_type_node,
10408 V4SF_type_node, V4SF_type_node, NULL_TREE);
10409 tree int_ftype_v16qi_v16qi
10410 = build_function_type_list (integer_type_node,
10411 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10412 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
10413 = build_function_type_list (integer_type_node,
10414 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10415
6f317ef3 10416 /* Add the simple ternary operators. */
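 /* Each entry's prototype is derived from the modes of its insn operands:
    for instance, an entry whose four operands are all V4SFmode (a vector
    float multiply-add, say) is given the v4sf_ftype_v4sf_v4sf_v4sf type
    by the switch below.  */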
586de218 10417 d = bdesc_3arg;
ca7558fc 10418 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 10419 {
2212663f
DB
10420 enum machine_mode mode0, mode1, mode2, mode3;
10421 tree type;
58646b77
PB
10422 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10423 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 10424
58646b77
PB
10425 if (is_overloaded)
10426 {
10427 mode0 = VOIDmode;
10428 mode1 = VOIDmode;
10429 mode2 = VOIDmode;
10430 mode3 = VOIDmode;
10431 }
10432 else
10433 {
10434 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10435 continue;
f676971a 10436
58646b77
PB
10437 mode0 = insn_data[d->icode].operand[0].mode;
10438 mode1 = insn_data[d->icode].operand[1].mode;
10439 mode2 = insn_data[d->icode].operand[2].mode;
10440 mode3 = insn_data[d->icode].operand[3].mode;
10441 }
bb8df8a6 10442
2212663f
DB
10443 /* When all four are of the same mode. */
10444 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
10445 {
10446 switch (mode0)
10447 {
58646b77
PB
10448 case VOIDmode:
10449 type = opaque_ftype_opaque_opaque_opaque;
10450 break;
617e0e1d
DB
10451 case V4SImode:
10452 type = v4si_ftype_v4si_v4si_v4si;
10453 break;
2212663f
DB
10454 case V4SFmode:
10455 type = v4sf_ftype_v4sf_v4sf_v4sf;
10456 break;
10457 case V8HImode:
10458 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 10459 break;
2212663f
DB
10460 case V16QImode:
10461 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10462 break;
96038623
DE
10463 case V2SFmode:
10464 type = v2sf_ftype_v2sf_v2sf_v2sf;
10465 break;
2212663f 10466 default:
37409796 10467 gcc_unreachable ();
2212663f
DB
10468 }
10469 }
10470 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 10471 {
2212663f
DB
10472 switch (mode0)
10473 {
10474 case V4SImode:
10475 type = v4si_ftype_v4si_v4si_v16qi;
10476 break;
10477 case V4SFmode:
10478 type = v4sf_ftype_v4sf_v4sf_v16qi;
10479 break;
10480 case V8HImode:
10481 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 10482 break;
2212663f
DB
10483 case V16QImode:
10484 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10485 break;
2212663f 10486 default:
37409796 10487 gcc_unreachable ();
2212663f
DB
10488 }
10489 }
f676971a 10490 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 10491 && mode3 == V4SImode)
24408032 10492 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 10493 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 10494 && mode3 == V4SImode)
24408032 10495 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 10496 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 10497 && mode3 == V4SImode)
24408032
AH
10498 type = v4sf_ftype_v4sf_v4sf_v4si;
10499
a7b376ee 10500 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
10501 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10502 && mode3 == QImode)
b9e4e5d1 10503 type = v16qi_ftype_v16qi_v16qi_int;
24408032 10504
a7b376ee 10505 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
10506 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10507 && mode3 == QImode)
b9e4e5d1 10508 type = v8hi_ftype_v8hi_v8hi_int;
24408032 10509
a7b376ee 10510 /* vint, vint, vint, 4-bit literal. */
24408032
AH
10511 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10512 && mode3 == QImode)
b9e4e5d1 10513 type = v4si_ftype_v4si_v4si_int;
24408032 10514
a7b376ee 10515 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
10516 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10517 && mode3 == QImode)
b9e4e5d1 10518 type = v4sf_ftype_v4sf_v4sf_int;
24408032 10519
2212663f 10520 else
37409796 10521 gcc_unreachable ();
2212663f
DB
10522
10523 def_builtin (d->mask, d->name, type, d->code);
10524 }
10525
0ac081f6 10526 /* Add the simple binary operators. */
00b960c7 10527 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 10528 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
10529 {
10530 enum machine_mode mode0, mode1, mode2;
10531 tree type;
58646b77
PB
10532 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10533 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 10534
58646b77
PB
10535 if (is_overloaded)
10536 {
10537 mode0 = VOIDmode;
10538 mode1 = VOIDmode;
10539 mode2 = VOIDmode;
10540 }
10541 else
bb8df8a6 10542 {
58646b77
PB
10543 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10544 continue;
f676971a 10545
58646b77
PB
10546 mode0 = insn_data[d->icode].operand[0].mode;
10547 mode1 = insn_data[d->icode].operand[1].mode;
10548 mode2 = insn_data[d->icode].operand[2].mode;
10549 }
0ac081f6
AH
10550
10551 /* When all three operands are of the same mode. */
10552 if (mode0 == mode1 && mode1 == mode2)
10553 {
10554 switch (mode0)
10555 {
58646b77
PB
10556 case VOIDmode:
10557 type = opaque_ftype_opaque_opaque;
10558 break;
0ac081f6
AH
10559 case V4SFmode:
10560 type = v4sf_ftype_v4sf_v4sf;
10561 break;
10562 case V4SImode:
10563 type = v4si_ftype_v4si_v4si;
10564 break;
10565 case V16QImode:
10566 type = v16qi_ftype_v16qi_v16qi;
10567 break;
10568 case V8HImode:
10569 type = v8hi_ftype_v8hi_v8hi;
10570 break;
a3170dc6
AH
10571 case V2SImode:
10572 type = v2si_ftype_v2si_v2si;
10573 break;
96038623
DE
10574 case V2SFmode:
10575 if (TARGET_PAIRED_FLOAT)
10576 type = v2sf_ftype_v2sf_v2sf;
10577 else
10578 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
10579 break;
10580 case SImode:
10581 type = int_ftype_int_int;
10582 break;
0ac081f6 10583 default:
37409796 10584 gcc_unreachable ();
0ac081f6
AH
10585 }
10586 }
10587
10588 /* A few other combos we really don't want to do manually. */
10589
10590 /* vint, vfloat, vfloat. */
10591 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10592 type = v4si_ftype_v4sf_v4sf;
10593
10594 /* vshort, vchar, vchar. */
10595 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10596 type = v8hi_ftype_v16qi_v16qi;
10597
10598 /* vint, vshort, vshort. */
10599 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10600 type = v4si_ftype_v8hi_v8hi;
10601
10602 /* vshort, vint, vint. */
10603 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10604 type = v8hi_ftype_v4si_v4si;
10605
10606 /* vchar, vshort, vshort. */
10607 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10608 type = v16qi_ftype_v8hi_v8hi;
10609
10610 /* vint, vchar, vint. */
10611 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10612 type = v4si_ftype_v16qi_v4si;
10613
fa066a23
AH
10614 /* vint, vchar, vchar. */
10615 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10616 type = v4si_ftype_v16qi_v16qi;
10617
0ac081f6
AH
10618 /* vint, vshort, vint. */
10619 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10620 type = v4si_ftype_v8hi_v4si;
f676971a 10621
a7b376ee 10622 /* vint, vint, 5-bit literal. */
2212663f 10623 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10624 type = v4si_ftype_v4si_int;
f676971a 10625
a7b376ee 10626 /* vshort, vshort, 5-bit literal. */
2212663f 10627 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 10628 type = v8hi_ftype_v8hi_int;
f676971a 10629
a7b376ee 10630 /* vchar, vchar, 5-bit literal. */
2212663f 10631 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 10632 type = v16qi_ftype_v16qi_int;
0ac081f6 10633
a7b376ee 10634 /* vfloat, vint, 5-bit literal. */
617e0e1d 10635 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10636 type = v4sf_ftype_v4si_int;
f676971a 10637
a7b376ee 10638 /* vint, vfloat, 5-bit literal. */
617e0e1d 10639 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 10640 type = v4si_ftype_v4sf_int;
617e0e1d 10641
a3170dc6
AH
10642 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10643 type = v2si_ftype_int_int;
10644
10645 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10646 type = v2si_ftype_v2si_char;
10647
10648 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10649 type = v2si_ftype_int_char;
10650
37409796 10651 else
0ac081f6 10652 {
37409796
NS
10653 /* int, x, x. */
10654 gcc_assert (mode0 == SImode);
0ac081f6
AH
10655 switch (mode1)
10656 {
10657 case V4SImode:
10658 type = int_ftype_v4si_v4si;
10659 break;
10660 case V4SFmode:
10661 type = int_ftype_v4sf_v4sf;
10662 break;
10663 case V16QImode:
10664 type = int_ftype_v16qi_v16qi;
10665 break;
10666 case V8HImode:
10667 type = int_ftype_v8hi_v8hi;
10668 break;
10669 default:
37409796 10670 gcc_unreachable ();
0ac081f6
AH
10671 }
10672 }
10673
2212663f
DB
10674 def_builtin (d->mask, d->name, type, d->code);
10675 }
24408032 10676
2212663f
DB
10677 /* Add the simple unary operators. */
10678 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10679 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10680 {
10681 enum machine_mode mode0, mode1;
10682 tree type;
58646b77
PB
10683 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10684 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10685
10686 if (is_overloaded)
10687 {
10688 mode0 = VOIDmode;
10689 mode1 = VOIDmode;
10690 }
10691 else
10692 {
10693 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10694 continue;
bb8df8a6 10695
58646b77
PB
10696 mode0 = insn_data[d->icode].operand[0].mode;
10697 mode1 = insn_data[d->icode].operand[1].mode;
10698 }
2212663f
DB
10699
10700 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10701 type = v4si_ftype_int;
2212663f 10702 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10703 type = v8hi_ftype_int;
2212663f 10704 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10705 type = v16qi_ftype_int;
58646b77
PB
10706 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10707 type = opaque_ftype_opaque;
617e0e1d
DB
10708 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10709 type = v4sf_ftype_v4sf;
20e26713
AH
10710 else if (mode0 == V8HImode && mode1 == V16QImode)
10711 type = v8hi_ftype_v16qi;
10712 else if (mode0 == V4SImode && mode1 == V8HImode)
10713 type = v4si_ftype_v8hi;
a3170dc6
AH
10714 else if (mode0 == V2SImode && mode1 == V2SImode)
10715 type = v2si_ftype_v2si;
10716 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10717 {
10718 if (TARGET_PAIRED_FLOAT)
10719 type = v2sf_ftype_v2sf;
10720 else
10721 type = v2sf_ftype_v2sf_spe;
10722 }
a3170dc6
AH
10723 else if (mode0 == V2SFmode && mode1 == V2SImode)
10724 type = v2sf_ftype_v2si;
10725 else if (mode0 == V2SImode && mode1 == V2SFmode)
10726 type = v2si_ftype_v2sf;
10727 else if (mode0 == V2SImode && mode1 == QImode)
10728 type = v2si_ftype_char;
2212663f 10729 else
37409796 10730 gcc_unreachable ();
2212663f 10731
0ac081f6
AH
10732 def_builtin (d->mask, d->name, type, d->code);
10733 }
10734}
10735
c15c90bb
ZW
10736static void
10737rs6000_init_libfuncs (void)
10738{
602ea4d3
JJ
10739 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10740 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10741 {
602ea4d3
JJ
10742 /* AIX library routines for float->int conversion. */
10743 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10744 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10745 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10746 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10747 }
c15c90bb 10748
602ea4d3 10749 if (!TARGET_IEEEQUAD)
98c41d98 10750 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
10751 if (!TARGET_XL_COMPAT)
10752 {
10753 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10754 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10755 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10756 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
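	/* For example, with the entries above a TFmode (IBM extended double)
	   multiply x * y is emitted as a call to __gcc_qmul; the -mxl-compat
	   configuration handled in the else branch below uses _xlqmul
	   instead.  */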
d0768f19 10757
17caeff2 10758 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10759 {
10760 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10761 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10762 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10763 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10764 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10765 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10766 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10767
10768 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10769 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10770 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10771 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10772 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10773 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10774 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10775 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10776 }
b26941b4
JM
10777
10778 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10779 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10780 }
10781 else
10782 {
10783 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10784 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10785 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10786 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10787 }
c9034561 10788 else
c15c90bb 10789 {
c9034561 10790 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10791
10792 set_optab_libfunc (add_optab, TFmode, "_q_add");
10793 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10794 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10795 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10796 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10797 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10798 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10799
c9034561
ZW
10800 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10801 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10802 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10803 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10804 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10805 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10806
85363ca0
ZW
10807 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10808 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10809 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10810 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10811 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10812 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10813 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10814 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10815 }
10816}
fba73eb1
DE
10817
10818\f
10819/* Expand a block clear operation, and return 1 if successful. Return 0
10820 if we should let the compiler generate normal code.
10821
10822 operands[0] is the destination
10823 operands[1] is the length
57e84f18 10824 operands[3] is the alignment */
fba73eb1
DE
10825
10826int
10827expand_block_clear (rtx operands[])
10828{
10829 rtx orig_dest = operands[0];
10830 rtx bytes_rtx = operands[1];
57e84f18 10831 rtx align_rtx = operands[3];
5514620a
GK
10832 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10833 HOST_WIDE_INT align;
10834 HOST_WIDE_INT bytes;
fba73eb1
DE
10835 int offset;
10836 int clear_bytes;
5514620a 10837 int clear_step;
fba73eb1
DE
10838
 10839 /* If this is not a fixed size clear, just call memset */
10840 if (! constp)
10841 return 0;
10842
37409796
NS
10843 /* This must be a fixed size alignment */
10844 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10845 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10846
10847 /* Anything to clear? */
10848 bytes = INTVAL (bytes_rtx);
10849 if (bytes <= 0)
10850 return 1;
10851
5514620a
GK
10852 /* Use the builtin memset after a point, to avoid huge code bloat.
10853 When optimize_size, avoid any significant code bloat; calling
10854 memset is about 4 instructions, so allow for one instruction to
10855 load zero and three to do clearing. */
10856 if (TARGET_ALTIVEC && align >= 128)
10857 clear_step = 16;
10858 else if (TARGET_POWERPC64 && align >= 32)
10859 clear_step = 8;
21d818ff
NF
10860 else if (TARGET_SPE && align >= 64)
10861 clear_step = 8;
5514620a
GK
10862 else
10863 clear_step = 4;
fba73eb1 10864
5514620a
GK
10865 if (optimize_size && bytes > 3 * clear_step)
10866 return 0;
10867 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10868 return 0;
10869
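 /* Worked example: with AltiVec and a 128-bit-aligned destination,
    clear_step is 16, so a 37-byte clear passes the size checks above and
    the loop below emits stores of 16, 16, 4 and 1 bytes.  */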
10870 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10871 {
fba73eb1
DE
10872 enum machine_mode mode = BLKmode;
10873 rtx dest;
f676971a 10874
5514620a
GK
10875 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10876 {
10877 clear_bytes = 16;
10878 mode = V4SImode;
10879 }
21d818ff
NF
10880 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10881 {
10882 clear_bytes = 8;
10883 mode = V2SImode;
10884 }
5514620a 10885 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10886 /* 64-bit loads and stores require word-aligned
10887 displacements. */
10888 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10889 {
10890 clear_bytes = 8;
10891 mode = DImode;
fba73eb1 10892 }
5514620a 10893 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
10894 { /* move 4 bytes */
10895 clear_bytes = 4;
10896 mode = SImode;
fba73eb1 10897 }
ec53fc93 10898 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
10899 { /* move 2 bytes */
10900 clear_bytes = 2;
10901 mode = HImode;
fba73eb1
DE
10902 }
10903 else /* move 1 byte at a time */
10904 {
10905 clear_bytes = 1;
10906 mode = QImode;
fba73eb1 10907 }
f676971a 10908
fba73eb1 10909 dest = adjust_address (orig_dest, mode, offset);
f676971a 10910
5514620a 10911 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10912 }
10913
10914 return 1;
10915}
10916
35aff10b 10917\f
7e69e155
MM
10918/* Expand a block move operation, and return 1 if successful. Return 0
10919 if we should let the compiler generate normal code.
10920
10921 operands[0] is the destination
10922 operands[1] is the source
10923 operands[2] is the length
10924 operands[3] is the alignment */
10925
3933e0e1
MM
10926#define MAX_MOVE_REG 4
10927
7e69e155 10928int
a2369ed3 10929expand_block_move (rtx operands[])
7e69e155 10930{
b6c9286a
MM
10931 rtx orig_dest = operands[0];
10932 rtx orig_src = operands[1];
7e69e155 10933 rtx bytes_rtx = operands[2];
7e69e155 10934 rtx align_rtx = operands[3];
3933e0e1 10935 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10936 int align;
3933e0e1
MM
10937 int bytes;
10938 int offset;
7e69e155 10939 int move_bytes;
cabfd258
GK
10940 rtx stores[MAX_MOVE_REG];
10941 int num_reg = 0;
7e69e155 10942
3933e0e1 10943 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 10944 if (! constp)
3933e0e1
MM
10945 return 0;
10946
37409796
NS
10947 /* This must be a fixed size alignment */
10948 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10949 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10950
7e69e155 10951 /* Anything to move? */
3933e0e1
MM
10952 bytes = INTVAL (bytes_rtx);
10953 if (bytes <= 0)
7e69e155
MM
10954 return 1;
10955
ea9982a8 10956 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10957 reg_parm_stack_space. */
ea9982a8 10958 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10959 return 0;
10960
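 /* Worked example: on a 32-bit target without string instructions,
    AltiVec or SPE, a 25-byte copy with 32-bit alignment becomes six
    SImode load/store pairs followed by one QImode copy, with the
    buffered stores flushed every MAX_MOVE_REG loads.  */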
cabfd258 10961 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10962 {
cabfd258 10963 union {
70128ad9 10964 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10965 rtx (*mov) (rtx, rtx);
cabfd258
GK
10966 } gen_func;
10967 enum machine_mode mode = BLKmode;
10968 rtx src, dest;
f676971a 10969
5514620a
GK
10970 /* Altivec first, since it will be faster than a string move
10971 when it applies, and usually not significantly larger. */
10972 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10973 {
10974 move_bytes = 16;
10975 mode = V4SImode;
10976 gen_func.mov = gen_movv4si;
10977 }
21d818ff
NF
10978 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10979 {
10980 move_bytes = 8;
10981 mode = V2SImode;
10982 gen_func.mov = gen_movv2si;
10983 }
5514620a 10984 else if (TARGET_STRING
cabfd258
GK
10985 && bytes > 24 /* move up to 32 bytes at a time */
10986 && ! fixed_regs[5]
10987 && ! fixed_regs[6]
10988 && ! fixed_regs[7]
10989 && ! fixed_regs[8]
10990 && ! fixed_regs[9]
10991 && ! fixed_regs[10]
10992 && ! fixed_regs[11]
10993 && ! fixed_regs[12])
7e69e155 10994 {
cabfd258 10995 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 10996 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
10997 }
10998 else if (TARGET_STRING
10999 && bytes > 16 /* move up to 24 bytes at a time */
11000 && ! fixed_regs[5]
11001 && ! fixed_regs[6]
11002 && ! fixed_regs[7]
11003 && ! fixed_regs[8]
11004 && ! fixed_regs[9]
11005 && ! fixed_regs[10])
11006 {
11007 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 11008 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
11009 }
11010 else if (TARGET_STRING
11011 && bytes > 8 /* move up to 16 bytes at a time */
11012 && ! fixed_regs[5]
11013 && ! fixed_regs[6]
11014 && ! fixed_regs[7]
11015 && ! fixed_regs[8])
11016 {
11017 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 11018 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
11019 }
11020 else if (bytes >= 8 && TARGET_POWERPC64
11021 /* 64-bit loads and stores require word-aligned
11022 displacements. */
fba73eb1 11023 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
11024 {
11025 move_bytes = 8;
11026 mode = DImode;
11027 gen_func.mov = gen_movdi;
11028 }
11029 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
11030 { /* move up to 8 bytes at a time */
11031 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 11032 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 11033 }
cd7d9ca4 11034 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
11035 { /* move 4 bytes */
11036 move_bytes = 4;
11037 mode = SImode;
11038 gen_func.mov = gen_movsi;
11039 }
ec53fc93 11040 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
11041 { /* move 2 bytes */
11042 move_bytes = 2;
11043 mode = HImode;
11044 gen_func.mov = gen_movhi;
11045 }
11046 else if (TARGET_STRING && bytes > 1)
11047 { /* move up to 4 bytes at a time */
11048 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 11049 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
11050 }
11051 else /* move 1 byte at a time */
11052 {
11053 move_bytes = 1;
11054 mode = QImode;
11055 gen_func.mov = gen_movqi;
11056 }
f676971a 11057
cabfd258
GK
11058 src = adjust_address (orig_src, mode, offset);
11059 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
11060
11061 if (mode != BLKmode)
cabfd258
GK
11062 {
11063 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 11064
cabfd258
GK
11065 emit_insn ((*gen_func.mov) (tmp_reg, src));
11066 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 11067 }
3933e0e1 11068
cabfd258
GK
11069 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
11070 {
11071 int i;
11072 for (i = 0; i < num_reg; i++)
11073 emit_insn (stores[i]);
11074 num_reg = 0;
11075 }
35aff10b 11076
cabfd258 11077 if (mode == BLKmode)
7e69e155 11078 {
70128ad9 11079 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
11080 patterns require zero offset. */
11081 if (!REG_P (XEXP (src, 0)))
b6c9286a 11082 {
cabfd258
GK
11083 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
11084 src = replace_equiv_address (src, src_reg);
b6c9286a 11085 }
cabfd258 11086 set_mem_size (src, GEN_INT (move_bytes));
f676971a 11087
cabfd258 11088 if (!REG_P (XEXP (dest, 0)))
3933e0e1 11089 {
cabfd258
GK
11090 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
11091 dest = replace_equiv_address (dest, dest_reg);
7e69e155 11092 }
cabfd258 11093 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 11094
70128ad9 11095 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
11096 GEN_INT (move_bytes & 31),
11097 align_rtx));
7e69e155 11098 }
7e69e155
MM
11099 }
11100
11101 return 1;
11102}
11103
d62294f5 11104\f
9caa3eb2
DE
11105/* Return a string to perform a load_multiple operation.
11106 operands[0] is the vector.
11107 operands[1] is the source address.
11108 operands[2] is the first destination register. */
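/* For instance (a made-up case): if operands[1] is register 4 and the
   destination vector starts at register 3 with three words, the address
   register is itself one of the destinations.  The code below then loads
   the other words individually and loads the word destined for the
   address register last, so the address is not clobbered too early.  */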
11109
11110const char *
a2369ed3 11111rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
11112{
11113 /* We have to handle the case where the pseudo used to contain the address
11114 is assigned to one of the output registers. */
11115 int i, j;
11116 int words = XVECLEN (operands[0], 0);
11117 rtx xop[10];
11118
11119 if (XVECLEN (operands[0], 0) == 1)
11120 return "{l|lwz} %2,0(%1)";
11121
11122 for (i = 0; i < words; i++)
11123 if (refers_to_regno_p (REGNO (operands[2]) + i,
11124 REGNO (operands[2]) + i + 1, operands[1], 0))
11125 {
11126 if (i == words-1)
11127 {
11128 xop[0] = GEN_INT (4 * (words-1));
11129 xop[1] = operands[1];
11130 xop[2] = operands[2];
11131 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
11132 return "";
11133 }
11134 else if (i == 0)
11135 {
11136 xop[0] = GEN_INT (4 * (words-1));
11137 xop[1] = operands[1];
11138 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
11139 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
11140 return "";
11141 }
11142 else
11143 {
11144 for (j = 0; j < words; j++)
11145 if (j != i)
11146 {
11147 xop[0] = GEN_INT (j * 4);
11148 xop[1] = operands[1];
11149 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
11150 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
11151 }
11152 xop[0] = GEN_INT (i * 4);
11153 xop[1] = operands[1];
11154 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
11155 return "";
11156 }
11157 }
11158
11159 return "{lsi|lswi} %2,%1,%N0";
11160}
11161
9878760c 11162\f
a4f6c312
SS
11163/* A validation routine: say whether CODE, a condition code, and MODE
11164 match. The other alternatives either don't make sense or should
11165 never be generated. */
39a10a29 11166
48d72335 11167void
a2369ed3 11168validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 11169{
37409796
NS
11170 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
11171 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
11172 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
11173
11174 /* These don't make sense. */
37409796
NS
11175 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
11176 || mode != CCUNSmode);
39a10a29 11177
37409796
NS
11178 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
11179 || mode == CCUNSmode);
39a10a29 11180
37409796
NS
11181 gcc_assert (mode == CCFPmode
11182 || (code != ORDERED && code != UNORDERED
11183 && code != UNEQ && code != LTGT
11184 && code != UNGT && code != UNLT
11185 && code != UNGE && code != UNLE));
f676971a
EC
11186
11187 /* These should never be generated except for
bc9ec0e0 11188 flag_finite_math_only. */
37409796
NS
11189 gcc_assert (mode != CCFPmode
11190 || flag_finite_math_only
11191 || (code != LE && code != GE
11192 && code != UNEQ && code != LTGT
11193 && code != UNGT && code != UNLT));
39a10a29
GK
11194
11195 /* These are invalid; the information is not there. */
37409796 11196 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
11197}
11198
9878760c
RK
11199\f
11200/* Return 1 if ANDOP is a mask that has no bits set that are not in the
11201 mask required to convert the result of a rotate insn into a shift
b1765bde 11202 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
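/* Example: for SHIFTOP 4 the required mask is 0xfffffff0, so an ANDOP of
   0x00000ff0 is accepted while 0x00000ff8 is rejected, since bit 3 lies
   outside that mask.  */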
9878760c
RK
11203
11204int
a2369ed3 11205includes_lshift_p (rtx shiftop, rtx andop)
9878760c 11206{
e2c953b6
DE
11207 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
11208
11209 shift_mask <<= INTVAL (shiftop);
9878760c 11210
b1765bde 11211 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
11212}
11213
11214/* Similar, but for right shift. */
11215
11216int
a2369ed3 11217includes_rshift_p (rtx shiftop, rtx andop)
9878760c 11218{
a7653a2c 11219 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
11220
11221 shift_mask >>= INTVAL (shiftop);
11222
b1765bde 11223 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
11224}
11225
c5059423
AM
11226/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
11227 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 11228 significant 0's, then one or more 1's, then zero or more 0's. */
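/* Example: with SHIFTOP 8, ANDOP 0x0000ff00 qualifies (eight low 0's, a
   block of 1's, then 0's), while 0x0001fe00 does not, because its low
   run of zeros is nine bits long.  */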
e2c953b6
DE
11229
11230int
a2369ed3 11231includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 11232{
c5059423
AM
11233 if (GET_CODE (andop) == CONST_INT)
11234 {
02071907 11235 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 11236
c5059423 11237 c = INTVAL (andop);
02071907 11238 if (c == 0 || c == ~0)
c5059423 11239 return 0;
e2c953b6 11240
02071907 11241 shift_mask = ~0;
c5059423
AM
11242 shift_mask <<= INTVAL (shiftop);
11243
b6d08ca1 11244 /* Find the least significant one bit. */
c5059423
AM
11245 lsb = c & -c;
11246
11247 /* It must coincide with the LSB of the shift mask. */
11248 if (-lsb != shift_mask)
11249 return 0;
e2c953b6 11250
c5059423
AM
11251 /* Invert to look for the next transition (if any). */
11252 c = ~c;
11253
11254 /* Remove the low group of ones (originally low group of zeros). */
11255 c &= -lsb;
11256
11257 /* Again find the lsb, and check we have all 1's above. */
11258 lsb = c & -c;
11259 return c == -lsb;
11260 }
11261 else if (GET_CODE (andop) == CONST_DOUBLE
11262 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11263 {
02071907
AM
11264 HOST_WIDE_INT low, high, lsb;
11265 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
11266
11267 low = CONST_DOUBLE_LOW (andop);
11268 if (HOST_BITS_PER_WIDE_INT < 64)
11269 high = CONST_DOUBLE_HIGH (andop);
11270
11271 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 11272 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
11273 return 0;
11274
11275 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11276 {
02071907 11277 shift_mask_high = ~0;
c5059423
AM
11278 if (INTVAL (shiftop) > 32)
11279 shift_mask_high <<= INTVAL (shiftop) - 32;
11280
11281 lsb = high & -high;
11282
11283 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
11284 return 0;
11285
11286 high = ~high;
11287 high &= -lsb;
11288
11289 lsb = high & -high;
11290 return high == -lsb;
11291 }
11292
02071907 11293 shift_mask_low = ~0;
c5059423
AM
11294 shift_mask_low <<= INTVAL (shiftop);
11295
11296 lsb = low & -low;
11297
11298 if (-lsb != shift_mask_low)
11299 return 0;
11300
11301 if (HOST_BITS_PER_WIDE_INT < 64)
11302 high = ~high;
11303 low = ~low;
11304 low &= -lsb;
11305
11306 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11307 {
11308 lsb = high & -high;
11309 return high == -lsb;
11310 }
11311
11312 lsb = low & -low;
11313 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
11314 }
11315 else
11316 return 0;
11317}
e2c953b6 11318
c5059423
AM
11319/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
11320 to perform a left shift. It must have SHIFTOP or more least
c1207243 11321 significant 0's, with the remainder of the word 1's. */
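/* Example: with SHIFTOP 4, an ANDOP of -256 (1's from bit 8 upward)
   qualifies, while -8 does not, because its lowest 1 bit sits below the
   shift amount.  */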
e2c953b6 11322
c5059423 11323int
a2369ed3 11324includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 11325{
e2c953b6 11326 if (GET_CODE (andop) == CONST_INT)
c5059423 11327 {
02071907 11328 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 11329
02071907 11330 shift_mask = ~0;
c5059423
AM
11331 shift_mask <<= INTVAL (shiftop);
11332 c = INTVAL (andop);
11333
c1207243 11334 /* Find the least significant one bit. */
c5059423
AM
11335 lsb = c & -c;
11336
11337 /* It must be covered by the shift mask.
a4f6c312 11338 This test also rejects c == 0. */
c5059423
AM
11339 if ((lsb & shift_mask) == 0)
11340 return 0;
11341
11342 /* Check we have all 1's above the transition, and reject all 1's. */
11343 return c == -lsb && lsb != 1;
11344 }
11345 else if (GET_CODE (andop) == CONST_DOUBLE
11346 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11347 {
02071907 11348 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
11349
11350 low = CONST_DOUBLE_LOW (andop);
11351
11352 if (HOST_BITS_PER_WIDE_INT < 64)
11353 {
02071907 11354 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
11355
11356 high = CONST_DOUBLE_HIGH (andop);
11357
11358 if (low == 0)
11359 {
02071907 11360 shift_mask_high = ~0;
c5059423
AM
11361 if (INTVAL (shiftop) > 32)
11362 shift_mask_high <<= INTVAL (shiftop) - 32;
11363
11364 lsb = high & -high;
11365
11366 if ((lsb & shift_mask_high) == 0)
11367 return 0;
11368
11369 return high == -lsb;
11370 }
11371 if (high != ~0)
11372 return 0;
11373 }
11374
02071907 11375 shift_mask_low = ~0;
c5059423
AM
11376 shift_mask_low <<= INTVAL (shiftop);
11377
11378 lsb = low & -low;
11379
11380 if ((lsb & shift_mask_low) == 0)
11381 return 0;
11382
11383 return low == -lsb && lsb != 1;
11384 }
e2c953b6 11385 else
c5059423 11386 return 0;
9878760c 11387}
35068b43 11388
11ac38b2
DE
11389/* Return 1 if the operands will generate valid arguments for an rlwimi
11390instruction doing an insert with right shift in 64-bit mode.  The mask may
11391not start on the first bit or stop on the last bit, because the wrap-around
11392effects of the instruction do not correspond to the semantics of the RTL insn. */
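/* Example: SIZEOP 8, STARTOP 48, SHIFTOP 16 satisfies every test below
   (48 < 64, 8 + 48 < 64, 8 + 16 < 32 and 64 - 16 >= 8), so such an
   insert can be done with rlwimi.  */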
11393
11394int
11395insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
11396{
429ec7dc
DE
11397 if (INTVAL (startop) > 32
11398 && INTVAL (startop) < 64
11399 && INTVAL (sizeop) > 1
11400 && INTVAL (sizeop) + INTVAL (startop) < 64
11401 && INTVAL (shiftop) > 0
11402 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
11403 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
11404 return 1;
11405
11406 return 0;
11407}
11408
35068b43 11409/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1, making them candidates
90f81f99 11410 for lfq and stfq insns, iff the registers are hard registers. */
35068b43
RK
11411
11412int
a2369ed3 11413registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
11414{
11415 /* We might have been passed a SUBREG. */
f676971a 11416 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 11417 return 0;
f676971a 11418
90f81f99
AP
11419 /* We might have been passed non floating point registers. */
11420 if (!FP_REGNO_P (REGNO (reg1))
11421 || !FP_REGNO_P (REGNO (reg2)))
11422 return 0;
35068b43
RK
11423
11424 return (REGNO (reg1) == REGNO (reg2) - 1);
11425}
11426
a4f6c312
SS
11427/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
11428 addr1 and addr2 must be in consecutive memory locations
11429 (addr2 == addr1 + 8). */
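/* For example (a sketch): mem1 = (mem:DF (plus:SI (reg 3) (const_int 8)))
   and mem2 = (mem:DF (plus:SI (reg 3) (const_int 16))) share a base
   register and differ by exactly 8 bytes, so the pair is accepted.  */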
35068b43
RK
11430
11431int
90f81f99 11432mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 11433{
90f81f99 11434 rtx addr1, addr2;
bb8df8a6
EC
11435 unsigned int reg1, reg2;
11436 int offset1, offset2;
35068b43 11437
90f81f99
AP
11438 /* The mems cannot be volatile. */
11439 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
11440 return 0;
f676971a 11441
90f81f99
AP
11442 addr1 = XEXP (mem1, 0);
11443 addr2 = XEXP (mem2, 0);
11444
35068b43
RK
11445 /* Extract an offset (if used) from the first addr. */
11446 if (GET_CODE (addr1) == PLUS)
11447 {
11448 /* If not a REG, return zero. */
11449 if (GET_CODE (XEXP (addr1, 0)) != REG)
11450 return 0;
11451 else
11452 {
c4ad648e 11453 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
11454 /* The offset must be constant! */
11455 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
11456 return 0;
11457 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
11458 }
11459 }
11460 else if (GET_CODE (addr1) != REG)
11461 return 0;
11462 else
11463 {
11464 reg1 = REGNO (addr1);
11465 /* This was a simple (mem (reg)) expression. Offset is 0. */
11466 offset1 = 0;
11467 }
11468
bb8df8a6
EC
11469 /* And now for the second addr. */
11470 if (GET_CODE (addr2) == PLUS)
11471 {
11472 /* If not a REG, return zero. */
11473 if (GET_CODE (XEXP (addr2, 0)) != REG)
11474 return 0;
11475 else
11476 {
11477 reg2 = REGNO (XEXP (addr2, 0));
11478 /* The offset must be constant. */
11479 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11480 return 0;
11481 offset2 = INTVAL (XEXP (addr2, 1));
11482 }
11483 }
11484 else if (GET_CODE (addr2) != REG)
35068b43 11485 return 0;
bb8df8a6
EC
11486 else
11487 {
11488 reg2 = REGNO (addr2);
11489 /* This was a simple (mem (reg)) expression. Offset is 0. */
11490 offset2 = 0;
11491 }
35068b43 11492
bb8df8a6
EC
11493 /* Both of these must have the same base register. */
11494 if (reg1 != reg2)
35068b43
RK
11495 return 0;
11496
11497 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 11498 if (offset2 != offset1 + 8)
35068b43
RK
11499 return 0;
11500
11501 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11502 instructions. */
11503 return 1;
11504}
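Informally, the routine above reduces each address to a (base register, constant offset) pair and then requires the same base with offsets exactly 8 apart. A minimal sketch of that final test on plain values (names invented for this example); (r9, 16) with (r9, 24) would pass, while (r9, 16) with (r10, 24) would not.

/* Illustrative sketch: the base/offset test mems_ok_for_quad_peep ends with.  */
static int
example_quad_addresses_ok (unsigned int reg1, int offset1,
                           unsigned int reg2, int offset2)
{
  return reg1 == reg2 && offset2 == offset1 + 8;
}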
9878760c 11505\f
e41b2a33
PB
11506
11507rtx
11508rs6000_secondary_memory_needed_rtx (enum machine_mode mode)
11509{
11510 static bool eliminated = false;
11511 if (mode != SDmode)
11512 return assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
11513 else
11514 {
11515 rtx mem = cfun->machine->sdmode_stack_slot;
11516 gcc_assert (mem != NULL_RTX);
11517
11518 if (!eliminated)
11519 {
11520 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
11521 cfun->machine->sdmode_stack_slot = mem;
11522 eliminated = true;
11523 }
11524 return mem;
11525 }
11526}
11527
11528static tree
11529rs6000_check_sdmode (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11530{
11531 /* Don't walk into types. */
11532 if (*tp == NULL_TREE || *tp == error_mark_node || TYPE_P (*tp))
11533 {
11534 *walk_subtrees = 0;
11535 return NULL_TREE;
11536 }
11537
11538 switch (TREE_CODE (*tp))
11539 {
11540 case VAR_DECL:
11541 case PARM_DECL:
11542 case FIELD_DECL:
11543 case RESULT_DECL:
48f5b722 11544 case SSA_NAME:
e41b2a33 11545 case REAL_CST:
fdf4f148 11546 case INDIRECT_REF:
a0f39282
JJ
11547 case ALIGN_INDIRECT_REF:
11548 case MISALIGNED_INDIRECT_REF:
fdf4f148 11549 case VIEW_CONVERT_EXPR:
e41b2a33
PB
11550 if (TYPE_MODE (TREE_TYPE (*tp)) == SDmode)
11551 return *tp;
11552 break;
11553 default:
11554 break;
11555 }
11556
11557 return NULL_TREE;
11558}
11559
11560
11561/* Allocate a 64-bit stack slot to be used for copying SDmode
11562 values through if this function has any SDmode references. */
11563
11564static void
11565rs6000_alloc_sdmode_stack_slot (void)
11566{
11567 tree t;
11568 basic_block bb;
726a989a 11569 gimple_stmt_iterator gsi;
e41b2a33
PB
11570
11571 gcc_assert (cfun->machine->sdmode_stack_slot == NULL_RTX);
11572
11573 FOR_EACH_BB (bb)
726a989a 11574 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
e41b2a33 11575 {
726a989a 11576 tree ret = walk_gimple_op (gsi_stmt (gsi), rs6000_check_sdmode, NULL);
e41b2a33
PB
11577 if (ret)
11578 {
11579 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11580 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11581 SDmode, 0);
11582 return;
11583 }
11584 }
11585
11586 /* Check for any SDmode parameters of the function. */
11587 for (t = DECL_ARGUMENTS (cfun->decl); t; t = TREE_CHAIN (t))
11588 {
11589 if (TREE_TYPE (t) == error_mark_node)
11590 continue;
11591
11592 if (TYPE_MODE (TREE_TYPE (t)) == SDmode
11593 || TYPE_MODE (DECL_ARG_TYPE (t)) == SDmode)
11594 {
11595 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11596 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11597 SDmode, 0);
11598 return;
11599 }
11600 }
11601}
11602
11603static void
11604rs6000_instantiate_decls (void)
11605{
11606 if (cfun->machine->sdmode_stack_slot != NULL_RTX)
11607 instantiate_decl_rtl (cfun->machine->sdmode_stack_slot);
11608}
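/* Informal summary of the SDmode machinery above:
   rs6000_alloc_sdmode_stack_slot walks the function's gimple statements and
   its parameters looking for any SDmode reference; if one is found it
   allocates a DDmode-sized (8-byte) stack slot and records its SDmode view
   in cfun->machine->sdmode_stack_slot.  rs6000_secondary_memory_needed_rtx
   hands that slot back (after eliminating virtual registers from its
   address) whenever an SDmode value has to be copied through memory, and
   rs6000_instantiate_decls re-instantiates the slot's address once virtual
   registers are gone.  */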
11609
9878760c 11610/* Return the register class of a scratch register needed to copy IN into
0a2aaacc 11611 or out of a register in RCLASS in MODE. If it can be done directly,
9878760c
RK
11612 NO_REGS is returned. */
11613
11614enum reg_class
0a2aaacc 11615rs6000_secondary_reload_class (enum reg_class rclass,
3c4774e0
R
11616 enum machine_mode mode ATTRIBUTE_UNUSED,
11617 rtx in)
9878760c 11618{
5accd822 11619 int regno;
9878760c 11620
ab82a49f
AP
11621 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
11622#if TARGET_MACHO
c4ad648e 11623 && MACHOPIC_INDIRECT
ab82a49f 11624#endif
c4ad648e 11625 ))
46fad5b7
DJ
11626 {
11627 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
11628 other than BASE_REGS for TARGET_ELF. So indicate that a
11629 register from BASE_REGS is needed as an intermediate
11630 register.
f676971a 11631
46fad5b7
DJ
11632 On Darwin, pic addresses require a load from memory, which
11633 needs a base register. */
0a2aaacc 11634 if (rclass != BASE_REGS
c4ad648e
AM
11635 && (GET_CODE (in) == SYMBOL_REF
11636 || GET_CODE (in) == HIGH
11637 || GET_CODE (in) == LABEL_REF
11638 || GET_CODE (in) == CONST))
11639 return BASE_REGS;
46fad5b7 11640 }
e7b7998a 11641
5accd822
DE
11642 if (GET_CODE (in) == REG)
11643 {
11644 regno = REGNO (in);
11645 if (regno >= FIRST_PSEUDO_REGISTER)
11646 {
11647 regno = true_regnum (in);
11648 if (regno >= FIRST_PSEUDO_REGISTER)
11649 regno = -1;
11650 }
11651 }
11652 else if (GET_CODE (in) == SUBREG)
11653 {
11654 regno = true_regnum (in);
11655 if (regno >= FIRST_PSEUDO_REGISTER)
11656 regno = -1;
11657 }
11658 else
11659 regno = -1;
11660
9878760c
RK
11661 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
11662 into anything. */
0a2aaacc 11663 if (rclass == GENERAL_REGS || rclass == BASE_REGS
9878760c
RK
11664 || (regno >= 0 && INT_REGNO_P (regno)))
11665 return NO_REGS;
11666
11667 /* Constants, memory, and FP registers can go into FP registers. */
11668 if ((regno == -1 || FP_REGNO_P (regno))
0a2aaacc 11669 && (rclass == FLOAT_REGS || rclass == NON_SPECIAL_REGS))
e41b2a33 11670 return (mode != SDmode) ? NO_REGS : GENERAL_REGS;
9878760c 11671
0ac081f6
AH
11672 /* Memory and AltiVec registers can go into AltiVec registers. */
11673 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
0a2aaacc 11674 && rclass == ALTIVEC_REGS)
0ac081f6
AH
11675 return NO_REGS;
11676
9878760c 11677 /* We can copy among the CR registers. */
0a2aaacc 11678 if ((rclass == CR_REGS || rclass == CR0_REGS)
9878760c
RK
11679 && regno >= 0 && CR_REGNO_P (regno))
11680 return NO_REGS;
11681
11682 /* Otherwise, we need GENERAL_REGS. */
11683 return GENERAL_REGS;
11684}
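/* Informal summary of the cases above (a sketch, not a full specification):
   a symbolic operand headed anywhere other than BASE_REGS on ELF or
   Darwin-PIC needs a BASE_REGS intermediate; GENERAL_REGS, BASE_REGS and
   integer registers need no scratch; constants, memory and FPRs move into
   FLOAT_REGS without a scratch except for SDmode, which asks for
   GENERAL_REGS; memory and AltiVec registers move into ALTIVEC_REGS freely;
   CR registers copy among CR_REGS/CR0_REGS; everything else goes through
   GENERAL_REGS.  */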
11685\f
11686/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 11687 know this is a valid comparison.
9878760c
RK
11688
11689 SCC_P is 1 if this is for an scc. That means that %D will have been
11690 used instead of %C, so the bits will be in different places.
11691
b4ac57ab 11692 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
11693
11694int
a2369ed3 11695ccr_bit (rtx op, int scc_p)
9878760c
RK
11696{
11697 enum rtx_code code = GET_CODE (op);
11698 enum machine_mode cc_mode;
11699 int cc_regnum;
11700 int base_bit;
9ebbca7d 11701 rtx reg;
9878760c 11702
ec8e098d 11703 if (!COMPARISON_P (op))
9878760c
RK
11704 return -1;
11705
9ebbca7d
GK
11706 reg = XEXP (op, 0);
11707
37409796 11708 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
11709
11710 cc_mode = GET_MODE (reg);
11711 cc_regnum = REGNO (reg);
11712 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 11713
39a10a29 11714 validate_condition_mode (code, cc_mode);
c5defebb 11715
b7053a3f
GK
11716 /* When generating a sCOND operation, only positive conditions are
11717 allowed. */
37409796
NS
11718 gcc_assert (!scc_p
11719 || code == EQ || code == GT || code == LT || code == UNORDERED
11720 || code == GTU || code == LTU);
f676971a 11721
9878760c
RK
11722 switch (code)
11723 {
11724 case NE:
11725 return scc_p ? base_bit + 3 : base_bit + 2;
11726 case EQ:
11727 return base_bit + 2;
1c882ea4 11728 case GT: case GTU: case UNLE:
9878760c 11729 return base_bit + 1;
1c882ea4 11730 case LT: case LTU: case UNGE:
9878760c 11731 return base_bit;
1c882ea4
GK
11732 case ORDERED: case UNORDERED:
11733 return base_bit + 3;
9878760c
RK
11734
11735 case GE: case GEU:
39a10a29 11736 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
11737 unordered position. So test that bit. For integer, this is ! LT
11738 unless this is an scc insn. */
39a10a29 11739 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
11740
11741 case LE: case LEU:
39a10a29 11742 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 11743
9878760c 11744 default:
37409796 11745 gcc_unreachable ();
9878760c
RK
11746 }
11747}
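As a concrete picture of the numbering used above: each CR field is four bits wide, ordered LT, GT, EQ, SO/UN, so the bit tested is 4 * field + offset. A hedged sketch with the field given directly as 0..7 (helper name invented); for example, the EQ bit of CR6 is 4 * 6 + 2 = 26.

/* Illustrative sketch: CR bit numbering as used by ccr_bit, with the CR
   field passed as 0..7 instead of as a hard register number.  */
static int
example_cr_bit (int cr_field, int within)  /* within: 0 LT, 1 GT, 2 EQ, 3 SO/UN */
{
  return 4 * cr_field + within;
}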
1ff7789b 11748\f
8d30c4ee 11749/* Return the GOT register. */
1ff7789b 11750
9390387d 11751rtx
a2369ed3 11752rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 11753{
a4f6c312
SS
11754 /* The second flow pass currently (June 1999) can't update
11755 regs_ever_live without disturbing other parts of the compiler, so
11756 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
11757 if (!can_create_pseudo_p ()
11758 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 11759 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 11760
e3b5732b 11761 crtl->uses_pic_offset_table = 1;
3cb999d8 11762
1ff7789b
MM
11763 return pic_offset_table_rtx;
11764}
a7df97e6 11765\f
e2500fed
GK
11766/* Function to init struct machine_function.
11767 This will be called, via a pointer variable,
11768 from push_function_context. */
a7df97e6 11769
e2500fed 11770static struct machine_function *
863d938c 11771rs6000_init_machine_status (void)
a7df97e6 11772{
5ead67f6 11773 return GGC_CNEW (machine_function);
a7df97e6 11774}
9878760c 11775\f
0ba1b2ff
AM
11776/* These macros test for integers and extract the low-order bits. */
11777#define INT_P(X) \
11778((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11779 && GET_MODE (X) == VOIDmode)
11780
11781#define INT_LOWPART(X) \
11782 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11783
11784int
a2369ed3 11785extract_MB (rtx op)
0ba1b2ff
AM
11786{
11787 int i;
11788 unsigned long val = INT_LOWPART (op);
11789
11790 /* If the high bit is zero, the value is the first 1 bit we find
11791 from the left. */
11792 if ((val & 0x80000000) == 0)
11793 {
37409796 11794 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11795
11796 i = 1;
11797 while (((val <<= 1) & 0x80000000) == 0)
11798 ++i;
11799 return i;
11800 }
11801
11802 /* If the high bit is set and the low bit is not, or the mask is all
11803 1's, the value is zero. */
11804 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11805 return 0;
11806
11807 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11808 from the right. */
11809 i = 31;
11810 while (((val >>= 1) & 1) != 0)
11811 --i;
11812
11813 return i;
11814}
11815
11816int
a2369ed3 11817extract_ME (rtx op)
0ba1b2ff
AM
11818{
11819 int i;
11820 unsigned long val = INT_LOWPART (op);
11821
11822 /* If the low bit is zero, the value is the first 1 bit we find from
11823 the right. */
11824 if ((val & 1) == 0)
11825 {
37409796 11826 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11827
11828 i = 30;
11829 while (((val >>= 1) & 1) == 0)
11830 --i;
11831
11832 return i;
11833 }
11834
11835 /* If the low bit is set and the high bit is not, or the mask is all
11836 1's, the value is 31. */
11837 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11838 return 31;
11839
11840 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11841 from the left. */
11842 i = 0;
11843 while (((val <<= 1) & 0x80000000) != 0)
11844 ++i;
11845
11846 return i;
11847}
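A hedged standalone sketch of what the two helpers above compute for the common contiguous (non-wrap-around) masks, with the most significant bit numbered 0: MB is the first set bit from the left, ME the last. The helper name is invented; for the mask 0x0ffffff0 it yields MB = 4 and ME = 27, matching extract_MB and extract_ME.

/* Illustrative sketch: MB/ME of a contiguous 32-bit mask, MSB counted as bit 0.  */
static void
example_mask_bounds (unsigned int mask, int *mb, int *me)
{
  int i;

  *mb = *me = -1;
  for (i = 0; i < 32; i++)
    if (mask & (0x80000000u >> i))
      {
        if (*mb < 0)
          *mb = i;   /* first set bit from the left */
        *me = i;     /* last set bit from the left */
      }
}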
11848
c4501e62
JJ
11849/* Locate some local-dynamic symbol still in use by this function
11850 so that we can print its name in some tls_ld pattern. */
11851
11852static const char *
863d938c 11853rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
11854{
11855 rtx insn;
11856
11857 if (cfun->machine->some_ld_name)
11858 return cfun->machine->some_ld_name;
11859
11860 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11861 if (INSN_P (insn)
11862 && for_each_rtx (&PATTERN (insn),
11863 rs6000_get_some_local_dynamic_name_1, 0))
11864 return cfun->machine->some_ld_name;
11865
37409796 11866 gcc_unreachable ();
c4501e62
JJ
11867}
11868
11869/* Helper function for rs6000_get_some_local_dynamic_name. */
11870
11871static int
a2369ed3 11872rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
11873{
11874 rtx x = *px;
11875
11876 if (GET_CODE (x) == SYMBOL_REF)
11877 {
11878 const char *str = XSTR (x, 0);
11879 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11880 {
11881 cfun->machine->some_ld_name = str;
11882 return 1;
11883 }
11884 }
11885
11886 return 0;
11887}
11888
85b776df
AM
11889/* Write out a function code label. */
11890
11891void
11892rs6000_output_function_entry (FILE *file, const char *fname)
11893{
11894 if (fname[0] != '.')
11895 {
11896 switch (DEFAULT_ABI)
11897 {
11898 default:
37409796 11899 gcc_unreachable ();
85b776df
AM
11900
11901 case ABI_AIX:
11902 if (DOT_SYMBOLS)
11903 putc ('.', file);
11904 else
11905 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11906 break;
11907
11908 case ABI_V4:
11909 case ABI_DARWIN:
11910 break;
11911 }
11912 }
11913 if (TARGET_AIX)
11914 RS6000_OUTPUT_BASENAME (file, fname);
11915 else
11916 assemble_name (file, fname);
11917}
11918
9878760c
RK
11919/* Print an operand. Recognize special options, documented below. */
11920
38c1f2d7 11921#if TARGET_ELF
d9407988 11922#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11923#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
11924#else
11925#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11926#define SMALL_DATA_REG 0
ba5e43aa
MM
11927#endif
11928
9878760c 11929void
a2369ed3 11930print_operand (FILE *file, rtx x, int code)
9878760c
RK
11931{
11932 int i;
a260abc9 11933 HOST_WIDE_INT val;
0ba1b2ff 11934 unsigned HOST_WIDE_INT uval;
9878760c
RK
11935
11936 switch (code)
11937 {
a8b3aeda 11938 case '.':
a85d226b
RK
11939 /* Write out an instruction after the call which may be replaced
11940 with glue code by the loader. This depends on the AIX version. */
11941 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
11942 return;
11943
81eace42
GK
11944 /* %a is output_address. */
11945
9854d9ed
RK
11946 case 'A':
11947 /* If X is a constant integer whose low-order 5 bits are zero,
11948 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11949 in the AIX assembler where "sri" with a zero shift count
20e26713 11950 writes a trash instruction. */
9854d9ed 11951 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11952 putc ('l', file);
9854d9ed 11953 else
76229ac8 11954 putc ('r', file);
9854d9ed
RK
11955 return;
11956
11957 case 'b':
e2c953b6
DE
11958 /* If constant, low-order 16 bits of constant, unsigned.
11959 Otherwise, write normally. */
11960 if (INT_P (x))
11961 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11962 else
11963 print_operand (file, x, 0);
cad12a8d
RK
11964 return;
11965
a260abc9
DE
11966 case 'B':
11967 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11968 for 64-bit mask direction. */
9390387d 11969 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11970 return;
a260abc9 11971
81eace42
GK
11972 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11973 output_operand. */
11974
423c1189
AH
11975 case 'c':
11976 /* X is a CR register. Print the number of the GT bit of the CR. */
11977 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11978 output_operand_lossage ("invalid %%c value");
11979 else
11980 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11981 return;
11982
11983 case 'D':
cef6b86c 11984 /* Like 'J' but get to the GT bit only. */
37409796 11985 gcc_assert (GET_CODE (x) == REG);
423c1189 11986
cef6b86c
EB
11987 /* Bit 1 is GT bit. */
11988 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 11989
cef6b86c
EB
11990 /* Add one for shift count in rlinm for scc. */
11991 fprintf (file, "%d", i + 1);
423c1189
AH
11992 return;
11993
9854d9ed 11994 case 'E':
39a10a29 11995 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
11996 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11997 output_operand_lossage ("invalid %%E value");
78fbdbf7 11998 else
39a10a29 11999 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 12000 return;
9854d9ed
RK
12001
12002 case 'f':
12003 /* X is a CR register. Print the shift count needed to move it
12004 to the high-order four bits. */
12005 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12006 output_operand_lossage ("invalid %%f value");
12007 else
9ebbca7d 12008 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
12009 return;
12010
12011 case 'F':
12012 /* Similar, but print the count for the rotate in the opposite
12013 direction. */
12014 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12015 output_operand_lossage ("invalid %%F value");
12016 else
9ebbca7d 12017 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
12018 return;
12019
12020 case 'G':
12021 /* X is a constant integer. If it is negative, print "m",
43aa4e05 12022 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
12023 if (GET_CODE (x) != CONST_INT)
12024 output_operand_lossage ("invalid %%G value");
12025 else if (INTVAL (x) >= 0)
76229ac8 12026 putc ('z', file);
9854d9ed 12027 else
76229ac8 12028 putc ('m', file);
9854d9ed 12029 return;
e2c953b6 12030
9878760c 12031 case 'h':
a4f6c312
SS
12032 /* If constant, output low-order five bits. Otherwise, write
12033 normally. */
9878760c 12034 if (INT_P (x))
5f59ecb7 12035 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
12036 else
12037 print_operand (file, x, 0);
12038 return;
12039
64305719 12040 case 'H':
a4f6c312
SS
12041 /* If constant, output low-order six bits. Otherwise, write
12042 normally. */
64305719 12043 if (INT_P (x))
5f59ecb7 12044 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
12045 else
12046 print_operand (file, x, 0);
12047 return;
12048
9854d9ed
RK
12049 case 'I':
12050 /* Print `i' if this is a constant, else nothing. */
9878760c 12051 if (INT_P (x))
76229ac8 12052 putc ('i', file);
9878760c
RK
12053 return;
12054
9854d9ed
RK
12055 case 'j':
12056 /* Write the bit number in CCR for jump. */
12057 i = ccr_bit (x, 0);
12058 if (i == -1)
12059 output_operand_lossage ("invalid %%j code");
9878760c 12060 else
9854d9ed 12061 fprintf (file, "%d", i);
9878760c
RK
12062 return;
12063
9854d9ed
RK
12064 case 'J':
12065 /* Similar, but add one for shift count in rlinm for scc and pass
12066 scc flag to `ccr_bit'. */
12067 i = ccr_bit (x, 1);
12068 if (i == -1)
12069 output_operand_lossage ("invalid %%J code");
12070 else
a0466a68
RK
12071 /* If we want bit 31, write a shift count of zero, not 32. */
12072 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
12073 return;
12074
9854d9ed
RK
12075 case 'k':
12076 /* X must be a constant. Write the 1's complement of the
12077 constant. */
9878760c 12078 if (! INT_P (x))
9854d9ed 12079 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
12080 else
12081 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
12082 return;
12083
81eace42 12084 case 'K':
9ebbca7d
GK
12085 /* X must be a symbolic constant on ELF. Write an
12086 expression suitable for an 'addi' that adds in the low 16
12087 bits of the MEM. */
12088 if (GET_CODE (x) != CONST)
12089 {
12090 print_operand_address (file, x);
12091 fputs ("@l", file);
12092 }
12093 else
12094 {
12095 if (GET_CODE (XEXP (x, 0)) != PLUS
12096 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
12097 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
12098 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 12099 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
12100 print_operand_address (file, XEXP (XEXP (x, 0), 0));
12101 fputs ("@l", file);
ed8d2920
MM
12102 /* For GNU as, there must be a non-alphanumeric character
12103 between 'l' and the number. The '-' is added by
12104 print_operand() already. */
12105 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
12106 fputs ("+", file);
9ebbca7d
GK
12107 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
12108 }
81eace42
GK
12109 return;
12110
12111 /* %l is output_asm_label. */
9ebbca7d 12112
9854d9ed
RK
12113 case 'L':
12114 /* Write second word of DImode or DFmode reference. Works on register
12115 or non-indexed memory only. */
12116 if (GET_CODE (x) == REG)
fb5c67a7 12117 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
12118 else if (GET_CODE (x) == MEM)
12119 {
12120 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 12121 we have already done it, we can just use an offset of word. */
9854d9ed
RK
12122 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12123 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
12124 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
12125 UNITS_PER_WORD));
6fb5fa3c
DB
12126 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12127 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
12128 UNITS_PER_WORD));
9854d9ed 12129 else
d7624dc0
RK
12130 output_address (XEXP (adjust_address_nv (x, SImode,
12131 UNITS_PER_WORD),
12132 0));
ed8908e7 12133
ba5e43aa 12134 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12135 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12136 reg_names[SMALL_DATA_REG]);
9854d9ed 12137 }
9878760c 12138 return;
f676971a 12139
9878760c
RK
12140 case 'm':
12141 /* MB value for a mask operand. */
b1765bde 12142 if (! mask_operand (x, SImode))
9878760c
RK
12143 output_operand_lossage ("invalid %%m value");
12144
0ba1b2ff 12145 fprintf (file, "%d", extract_MB (x));
9878760c
RK
12146 return;
12147
12148 case 'M':
12149 /* ME value for a mask operand. */
b1765bde 12150 if (! mask_operand (x, SImode))
a260abc9 12151 output_operand_lossage ("invalid %%M value");
9878760c 12152
0ba1b2ff 12153 fprintf (file, "%d", extract_ME (x));
9878760c
RK
12154 return;
12155
81eace42
GK
12156 /* %n outputs the negative of its operand. */
12157
9878760c
RK
12158 case 'N':
12159 /* Write the number of elements in the vector times 4. */
12160 if (GET_CODE (x) != PARALLEL)
12161 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
12162 else
12163 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
12164 return;
12165
12166 case 'O':
12167 /* Similar, but subtract 1 first. */
12168 if (GET_CODE (x) != PARALLEL)
1427100a 12169 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
12170 else
12171 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
12172 return;
12173
9854d9ed
RK
12174 case 'p':
12175 /* X is a CONST_INT that is a power of two. Output the logarithm. */
12176 if (! INT_P (x)
2bfcf297 12177 || INT_LOWPART (x) < 0
9854d9ed
RK
12178 || (i = exact_log2 (INT_LOWPART (x))) < 0)
12179 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
12180 else
12181 fprintf (file, "%d", i);
9854d9ed
RK
12182 return;
12183
9878760c
RK
12184 case 'P':
12185 /* The operand must be an indirect memory reference. The result
8bb418a3 12186 is the register name. */
9878760c
RK
12187 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
12188 || REGNO (XEXP (x, 0)) >= 32)
12189 output_operand_lossage ("invalid %%P value");
e2c953b6 12190 else
fb5c67a7 12191 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
12192 return;
12193
dfbdccdb
GK
12194 case 'q':
12195 /* This outputs the logical code corresponding to a boolean
12196 expression. The expression may have one or both operands
39a10a29 12197 negated (if one, only the first one). For condition register
c4ad648e
AM
12198 logical operations, it will also treat the negated
12199 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 12200 {
63bc1d05 12201 const char *const *t = 0;
dfbdccdb
GK
12202 const char *s;
12203 enum rtx_code code = GET_CODE (x);
12204 static const char * const tbl[3][3] = {
12205 { "and", "andc", "nor" },
12206 { "or", "orc", "nand" },
12207 { "xor", "eqv", "xor" } };
12208
12209 if (code == AND)
12210 t = tbl[0];
12211 else if (code == IOR)
12212 t = tbl[1];
12213 else if (code == XOR)
12214 t = tbl[2];
12215 else
12216 output_operand_lossage ("invalid %%q value");
12217
12218 if (GET_CODE (XEXP (x, 0)) != NOT)
12219 s = t[0];
12220 else
12221 {
12222 if (GET_CODE (XEXP (x, 1)) == NOT)
12223 s = t[2];
12224 else
12225 s = t[1];
12226 }
f676971a 12227
dfbdccdb
GK
12228 fputs (s, file);
12229 }
12230 return;
12231
2c4a9cff
DE
12232 case 'Q':
12233 if (TARGET_MFCRF)
3b6ce0af 12234 fputc (',', file);
5efb1046 12235 /* FALLTHRU */
2c4a9cff
DE
12236 else
12237 return;
12238
9854d9ed
RK
12239 case 'R':
12240 /* X is a CR register. Print the mask for `mtcrf'. */
12241 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12242 output_operand_lossage ("invalid %%R value");
12243 else
9ebbca7d 12244 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 12245 return;
9854d9ed
RK
12246
12247 case 's':
12248 /* Low 5 bits of 32 - value */
12249 if (! INT_P (x))
12250 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
12251 else
12252 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 12253 return;
9854d9ed 12254
a260abc9 12255 case 'S':
0ba1b2ff 12256 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
12257 CONST_INT 32-bit mask is considered sign-extended so any
12258 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 12259 if (! mask64_operand (x, DImode))
a260abc9
DE
12260 output_operand_lossage ("invalid %%S value");
12261
0ba1b2ff 12262 uval = INT_LOWPART (x);
a260abc9 12263
0ba1b2ff 12264 if (uval & 1) /* Clear Left */
a260abc9 12265 {
f099d360
GK
12266#if HOST_BITS_PER_WIDE_INT > 64
12267 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12268#endif
0ba1b2ff 12269 i = 64;
a260abc9 12270 }
0ba1b2ff 12271 else /* Clear Right */
a260abc9 12272 {
0ba1b2ff 12273 uval = ~uval;
f099d360
GK
12274#if HOST_BITS_PER_WIDE_INT > 64
12275 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12276#endif
0ba1b2ff 12277 i = 63;
a260abc9 12278 }
0ba1b2ff
AM
12279 while (uval != 0)
12280 --i, uval >>= 1;
37409796 12281 gcc_assert (i >= 0);
0ba1b2ff
AM
12282 fprintf (file, "%d", i);
12283 return;
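	 /* Worked example for the loop above (informal): for the clear-right
	    mask 0xffffffffffffff00 the low bit is clear, so uval becomes
	    ~mask = 0xff and i starts at 63; eight shifts empty uval, leaving
	    i = 55, the position of the last set bit counting the MSB as 0.
	    For the clear-left mask 0x00000000ffffffff the other branch starts
	    at 64 and thirty-two shifts leave i = 32, the first set bit
	    counted the same way.  */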
a260abc9 12284
a3170dc6
AH
12285 case 't':
12286 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 12287 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
12288
12289 /* Bit 3 is OV bit. */
12290 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
12291
12292 /* If we want bit 31, write a shift count of zero, not 32. */
12293 fprintf (file, "%d", i == 31 ? 0 : i + 1);
12294 return;
12295
cccf3bdc
DE
12296 case 'T':
12297 /* Print the symbolic name of a branch target register. */
1de43f85
DE
12298 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
12299 && REGNO (x) != CTR_REGNO))
cccf3bdc 12300 output_operand_lossage ("invalid %%T value");
1de43f85 12301 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
12302 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
12303 else
12304 fputs ("ctr", file);
12305 return;
12306
9854d9ed 12307 case 'u':
802a0058 12308 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
12309 if (! INT_P (x))
12310 output_operand_lossage ("invalid %%u value");
e2c953b6 12311 else
f676971a 12312 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 12313 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
12314 return;
12315
802a0058
MM
12316 case 'v':
12317 /* High-order 16 bits of constant for use in signed operand. */
12318 if (! INT_P (x))
12319 output_operand_lossage ("invalid %%v value");
e2c953b6 12320 else
134c32f6
DE
12321 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
12322 (INT_LOWPART (x) >> 16) & 0xffff);
12323 return;
802a0058 12324
9854d9ed
RK
12325 case 'U':
12326 /* Print `u' if this has an auto-increment or auto-decrement. */
12327 if (GET_CODE (x) == MEM
12328 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
12329 || GET_CODE (XEXP (x, 0)) == PRE_DEC
12330 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 12331 putc ('u', file);
9854d9ed 12332 return;
9878760c 12333
e0cd0770
JC
12334 case 'V':
12335 /* Print the trap code for this operand. */
12336 switch (GET_CODE (x))
12337 {
12338 case EQ:
12339 fputs ("eq", file); /* 4 */
12340 break;
12341 case NE:
12342 fputs ("ne", file); /* 24 */
12343 break;
12344 case LT:
12345 fputs ("lt", file); /* 16 */
12346 break;
12347 case LE:
12348 fputs ("le", file); /* 20 */
12349 break;
12350 case GT:
12351 fputs ("gt", file); /* 8 */
12352 break;
12353 case GE:
12354 fputs ("ge", file); /* 12 */
12355 break;
12356 case LTU:
12357 fputs ("llt", file); /* 2 */
12358 break;
12359 case LEU:
12360 fputs ("lle", file); /* 6 */
12361 break;
12362 case GTU:
12363 fputs ("lgt", file); /* 1 */
12364 break;
12365 case GEU:
12366 fputs ("lge", file); /* 5 */
12367 break;
12368 default:
37409796 12369 gcc_unreachable ();
e0cd0770
JC
12370 }
12371 break;
12372
9854d9ed
RK
12373 case 'w':
12374 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
12375 normally. */
12376 if (INT_P (x))
f676971a 12377 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 12378 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
12379 else
12380 print_operand (file, x, 0);
9878760c
RK
12381 return;
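      /* The XOR/subtract above is the usual branch-free sign extension of a
	 16-bit field: for x = 0xffff8123 the low half is 0x8123, XOR 0x8000
	 gives 0x0123, and subtracting 0x8000 yields -32477, the value of
	 0x8123 read as a signed 16-bit quantity.  */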
12382
9854d9ed 12383 case 'W':
e2c953b6 12384 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
12385 val = (GET_CODE (x) == CONST_INT
12386 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
12387
12388 if (val < 0)
12389 i = -1;
9854d9ed 12390 else
e2c953b6
DE
12391 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
12392 if ((val <<= 1) < 0)
12393 break;
12394
12395#if HOST_BITS_PER_WIDE_INT == 32
12396 if (GET_CODE (x) == CONST_INT && i >= 0)
12397 i += 32; /* zero-extend high-part was all 0's */
12398 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
12399 {
12400 val = CONST_DOUBLE_LOW (x);
12401
37409796
NS
12402 gcc_assert (val);
12403 if (val < 0)
e2c953b6
DE
12404 --i;
12405 else
12406 for ( ; i < 64; i++)
12407 if ((val <<= 1) < 0)
12408 break;
12409 }
12410#endif
12411
12412 fprintf (file, "%d", i + 1);
9854d9ed 12413 return;
9878760c 12414
9854d9ed
RK
12415 case 'X':
12416 if (GET_CODE (x) == MEM
6fb5fa3c
DB
12417 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
12418 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
12419 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 12420 putc ('x', file);
9854d9ed 12421 return;
9878760c 12422
9854d9ed
RK
12423 case 'Y':
12424 /* Like 'L', for third word of TImode */
12425 if (GET_CODE (x) == REG)
fb5c67a7 12426 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 12427 else if (GET_CODE (x) == MEM)
9878760c 12428 {
9854d9ed
RK
12429 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12430 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12431 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
12432 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12433 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 12434 else
d7624dc0 12435 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 12436 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12437 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12438 reg_names[SMALL_DATA_REG]);
9878760c
RK
12439 }
12440 return;
f676971a 12441
9878760c 12442 case 'z':
b4ac57ab
RS
12443 /* X is a SYMBOL_REF. Write out the name preceded by a
12444 period and without any trailing data in brackets. Used for function
4d30c363
MM
12445 names. If we are configured for System V (or the embedded ABI) on
12446 the PowerPC, do not emit the period, since those systems do not use
12447 TOCs and the like. */
37409796 12448 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 12449
c4ad648e
AM
12450 /* Mark the decl as referenced so that cgraph will output the
12451 function. */
9bf6462a 12452 if (SYMBOL_REF_DECL (x))
c4ad648e 12453 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 12454
85b776df 12455 /* For macho, check to see if we need a stub. */
f9da97f0
AP
12456 if (TARGET_MACHO)
12457 {
12458 const char *name = XSTR (x, 0);
a031e781 12459#if TARGET_MACHO
3b48085e 12460 if (MACHOPIC_INDIRECT
11abc112
MM
12461 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
12462 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
12463#endif
12464 assemble_name (file, name);
12465 }
85b776df 12466 else if (!DOT_SYMBOLS)
9739c90c 12467 assemble_name (file, XSTR (x, 0));
85b776df
AM
12468 else
12469 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
12470 return;
12471
9854d9ed
RK
12472 case 'Z':
12473 /* Like 'L', for last word of TImode. */
12474 if (GET_CODE (x) == REG)
fb5c67a7 12475 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
12476 else if (GET_CODE (x) == MEM)
12477 {
12478 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12479 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12480 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
12481 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12482 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 12483 else
d7624dc0 12484 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 12485 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12486 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12487 reg_names[SMALL_DATA_REG]);
9854d9ed 12488 }
5c23c401 12489 return;
0ac081f6 12490
a3170dc6 12491 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
12492 case 'y':
12493 {
12494 rtx tmp;
12495
37409796 12496 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
12497
12498 tmp = XEXP (x, 0);
12499
90d3ff1c 12500 /* Ugly hack because %y is overloaded. */
8ef65e3d 12501 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
12502 && (GET_MODE_SIZE (GET_MODE (x)) == 8
12503 || GET_MODE (x) == TFmode
12504 || GET_MODE (x) == TImode))
a3170dc6
AH
12505 {
12506 /* Handle [reg]. */
12507 if (GET_CODE (tmp) == REG)
12508 {
12509 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
12510 break;
12511 }
12512 /* Handle [reg+UIMM]. */
12513 else if (GET_CODE (tmp) == PLUS &&
12514 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
12515 {
12516 int x;
12517
37409796 12518 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
12519
12520 x = INTVAL (XEXP (tmp, 1));
12521 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
12522 break;
12523 }
12524
12525 /* Fall through. Must be [reg+reg]. */
12526 }
850e8d3d
DN
12527 if (TARGET_ALTIVEC
12528 && GET_CODE (tmp) == AND
12529 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
12530 && INTVAL (XEXP (tmp, 1)) == -16)
12531 tmp = XEXP (tmp, 0);
0ac081f6 12532 if (GET_CODE (tmp) == REG)
c62f2db5 12533 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 12534 else
0ac081f6 12535 {
cb8cc791
AP
12536 if (GET_CODE (tmp) != PLUS
12537 || !REG_P (XEXP (tmp, 0))
12538 || !REG_P (XEXP (tmp, 1)))
12539 {
12540 output_operand_lossage ("invalid %%y value, try using the 'Z' constraint");
12541 break;
12542 }
bb8df8a6 12543
0ac081f6
AH
12544 if (REGNO (XEXP (tmp, 0)) == 0)
12545 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
12546 reg_names[ REGNO (XEXP (tmp, 0)) ]);
12547 else
12548 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
12549 reg_names[ REGNO (XEXP (tmp, 1)) ]);
12550 }
0ac081f6
AH
12551 break;
12552 }
f676971a 12553
9878760c
RK
12554 case 0:
12555 if (GET_CODE (x) == REG)
12556 fprintf (file, "%s", reg_names[REGNO (x)]);
12557 else if (GET_CODE (x) == MEM)
12558 {
12559 /* We need to handle PRE_INC and PRE_DEC here, since we need to
12560 know the width from the mode. */
12561 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
12562 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
12563 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 12564 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
12565 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
12566 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
12567 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12568 output_address (XEXP (XEXP (x, 0), 1));
9878760c 12569 else
a54d04b7 12570 output_address (XEXP (x, 0));
9878760c
RK
12571 }
12572 else
a54d04b7 12573 output_addr_const (file, x);
a85d226b 12574 return;
9878760c 12575
c4501e62
JJ
12576 case '&':
12577 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12578 return;
12579
9878760c
RK
12580 default:
12581 output_operand_lossage ("invalid %%xn code");
12582 }
12583}
12584\f
12585/* Print the address of an operand. */
12586
12587void
a2369ed3 12588print_operand_address (FILE *file, rtx x)
9878760c
RK
12589{
12590 if (GET_CODE (x) == REG)
4697a36c 12591 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
12592 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12593 || GET_CODE (x) == LABEL_REF)
9878760c
RK
12594 {
12595 output_addr_const (file, x);
ba5e43aa 12596 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12597 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12598 reg_names[SMALL_DATA_REG]);
37409796
NS
12599 else
12600 gcc_assert (!TARGET_TOC);
9878760c
RK
12601 }
12602 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12603 {
9024f4b8 12604 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 12605 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
12606 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12607 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 12608 else
4697a36c
MM
12609 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12610 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
12611 }
12612 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
12613 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
12614 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
12615#if TARGET_ELF
12616 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12617 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
12618 {
12619 output_addr_const (file, XEXP (x, 1));
12620 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12621 }
c859cda6
DJ
12622#endif
12623#if TARGET_MACHO
12624 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12625 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
12626 {
12627 fprintf (file, "lo16(");
12628 output_addr_const (file, XEXP (x, 1));
12629 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12630 }
3cb999d8 12631#endif
4d588c14 12632 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 12633 {
2e4316da 12634 output_addr_const (file, XEXP (x, 1));
9ebbca7d
GK
12635 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
12636 }
9878760c 12637 else
37409796 12638 gcc_unreachable ();
9878760c
RK
12639}
12640\f
2e4316da
RS
12641/* Implement OUTPUT_ADDR_CONST_EXTRA for address X. */
12642
12643bool
12644rs6000_output_addr_const_extra (FILE *file, rtx x)
12645{
12646 if (GET_CODE (x) == UNSPEC)
12647 switch (XINT (x, 1))
12648 {
12649 case UNSPEC_TOCREL:
12650 x = XVECEXP (x, 0, 0);
12651 gcc_assert (GET_CODE (x) == SYMBOL_REF);
12652 output_addr_const (file, x);
12653 if (!TARGET_AIX || (TARGET_ELF && TARGET_MINIMAL_TOC))
12654 {
12655 putc ('-', file);
12656 assemble_name (file, toc_label_name);
12657 }
12658 else if (TARGET_ELF)
12659 fputs ("@toc", file);
12660 return true;
08a6a74b
RS
12661
12662#if TARGET_MACHO
12663 case UNSPEC_MACHOPIC_OFFSET:
12664 output_addr_const (file, XVECEXP (x, 0, 0));
12665 putc ('-', file);
12666 machopic_output_function_base_name (file);
12667 return true;
12668#endif
2e4316da
RS
12669 }
12670 return false;
12671}
12672\f
88cad84b 12673/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
12674 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
12675 is defined. It also needs to handle DI-mode objects on 64-bit
12676 targets. */
12677
12678static bool
a2369ed3 12679rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 12680{
f4f4921e 12681#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 12682 /* Special handling for SI values. */
84dcde01 12683 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 12684 {
301d03af 12685 static int recurse = 0;
f676971a 12686
301d03af
RS
12687 /* For -mrelocatable, we mark all addresses that need to be fixed up
12688 in the .fixup section. */
12689 if (TARGET_RELOCATABLE
d6b5193b
RS
12690 && in_section != toc_section
12691 && in_section != text_section
4325ca90 12692 && !unlikely_text_section_p (in_section)
301d03af
RS
12693 && !recurse
12694 && GET_CODE (x) != CONST_INT
12695 && GET_CODE (x) != CONST_DOUBLE
12696 && CONSTANT_P (x))
12697 {
12698 char buf[256];
12699
12700 recurse = 1;
12701 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12702 fixuplabelno++;
12703 ASM_OUTPUT_LABEL (asm_out_file, buf);
12704 fprintf (asm_out_file, "\t.long\t(");
12705 output_addr_const (asm_out_file, x);
12706 fprintf (asm_out_file, ")@fixup\n");
12707 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12708 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12709 fprintf (asm_out_file, "\t.long\t");
12710 assemble_name (asm_out_file, buf);
12711 fprintf (asm_out_file, "\n\t.previous\n");
12712 recurse = 0;
12713 return true;
12714 }
12715 /* Remove initial .'s to turn a -mcall-aixdesc function
12716 address into the address of the descriptor, not the function
12717 itself. */
12718 else if (GET_CODE (x) == SYMBOL_REF
12719 && XSTR (x, 0)[0] == '.'
12720 && DEFAULT_ABI == ABI_AIX)
12721 {
12722 const char *name = XSTR (x, 0);
12723 while (*name == '.')
12724 name++;
12725
12726 fprintf (asm_out_file, "\t.long\t%s\n", name);
12727 return true;
12728 }
12729 }
f4f4921e 12730#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
12731 return default_assemble_integer (x, size, aligned_p);
12732}
93638d7a
AM
12733
12734#ifdef HAVE_GAS_HIDDEN
12735/* Emit an assembler directive to set symbol visibility for DECL to
12736 VISIBILITY_TYPE. */
12737
5add3202 12738static void
a2369ed3 12739rs6000_assemble_visibility (tree decl, int vis)
93638d7a 12740{
93638d7a
AM
12741 /* Functions need to have their entry point symbol visibility set as
12742 well as their descriptor symbol visibility. */
85b776df
AM
12743 if (DEFAULT_ABI == ABI_AIX
12744 && DOT_SYMBOLS
12745 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 12746 {
25fdb4dc 12747 static const char * const visibility_types[] = {
c4ad648e 12748 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
12749 };
12750
12751 const char *name, *type;
93638d7a
AM
12752
12753 name = ((* targetm.strip_name_encoding)
12754 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 12755 type = visibility_types[vis];
93638d7a 12756
25fdb4dc
RH
12757 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12758 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 12759 }
25fdb4dc
RH
12760 else
12761 default_assemble_visibility (decl, vis);
93638d7a
AM
12762}
12763#endif
301d03af 12764\f
39a10a29 12765enum rtx_code
a2369ed3 12766rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
12767{
12768 /* Reversal of FP compares takes care -- an ordered compare
12769 becomes an unordered compare and vice versa. */
f676971a 12770 if (mode == CCFPmode
bc9ec0e0
GK
12771 && (!flag_finite_math_only
12772 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12773 || code == UNEQ || code == LTGT))
bab6226b 12774 return reverse_condition_maybe_unordered (code);
39a10a29 12775 else
bab6226b 12776 return reverse_condition (code);
39a10a29
GK
12777}
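/* Concrete instance of the rule above: for a CCFPmode compare, unless
   flag_finite_math_only allows the shortcut, reversing (ge x y) yields
   (unlt x y) rather than (lt x y), so a NaN operand, which makes the
   original test false, still satisfies the reversed test.  For integer
   compares the plain reverse_condition mapping (GE becomes LT) is used.  */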
12778
39a10a29
GK
12779/* Generate a compare for CODE. Return a brand-new rtx that
12780 represents the result of the compare. */
a4f6c312 12781
39a10a29 12782static rtx
f90b7a5a 12783rs6000_generate_compare (rtx cmp, enum machine_mode mode)
39a10a29
GK
12784{
12785 enum machine_mode comp_mode;
12786 rtx compare_result;
f90b7a5a
PB
12787 enum rtx_code code = GET_CODE (cmp);
12788 rtx op0 = XEXP (cmp, 0);
12789 rtx op1 = XEXP (cmp, 1);
39a10a29 12790
f90b7a5a 12791 if (FLOAT_MODE_P (mode))
39a10a29
GK
12792 comp_mode = CCFPmode;
12793 else if (code == GTU || code == LTU
c4ad648e 12794 || code == GEU || code == LEU)
39a10a29 12795 comp_mode = CCUNSmode;
60934f9c 12796 else if ((code == EQ || code == NE)
f90b7a5a
PB
12797 && GET_CODE (op0) == SUBREG
12798 && GET_CODE (op1) == SUBREG
12799 && SUBREG_PROMOTED_UNSIGNED_P (op0)
12800 && SUBREG_PROMOTED_UNSIGNED_P (op1))
60934f9c
NS
12801 /* These are unsigned values, perhaps there will be a later
12802 ordering compare that can be shared with this one.
12803 Unfortunately we cannot detect the signedness of the operands
12804 for non-subregs. */
12805 comp_mode = CCUNSmode;
39a10a29
GK
12806 else
12807 comp_mode = CCmode;
12808
12809 /* First, the compare. */
12810 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12811
cef6b86c 12812 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12813 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
f90b7a5a 12814 && FLOAT_MODE_P (mode))
a3170dc6 12815 {
64022b5d 12816 rtx cmp, or_result, compare_result2;
f90b7a5a 12817 enum machine_mode op_mode = GET_MODE (op0);
4d4cbc0e
AH
12818
12819 if (op_mode == VOIDmode)
f90b7a5a 12820 op_mode = GET_MODE (op1);
a3170dc6 12821
cef6b86c
EB
12822 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12823 This explains the following mess. */
423c1189 12824
a3170dc6
AH
12825 switch (code)
12826 {
423c1189 12827 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
12828 switch (op_mode)
12829 {
12830 case SFmode:
1cdc0d8f 12831 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12832 ? gen_tstsfeq_gpr (compare_result, op0, op1)
12833 : gen_cmpsfeq_gpr (compare_result, op0, op1);
37409796
NS
12834 break;
12835
12836 case DFmode:
1cdc0d8f 12837 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12838 ? gen_tstdfeq_gpr (compare_result, op0, op1)
12839 : gen_cmpdfeq_gpr (compare_result, op0, op1);
37409796
NS
12840 break;
12841
17caeff2 12842 case TFmode:
1cdc0d8f 12843 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12844 ? gen_tsttfeq_gpr (compare_result, op0, op1)
12845 : gen_cmptfeq_gpr (compare_result, op0, op1);
17caeff2
JM
12846 break;
12847
37409796
NS
12848 default:
12849 gcc_unreachable ();
12850 }
a3170dc6 12851 break;
bb8df8a6 12852
423c1189 12853 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
12854 switch (op_mode)
12855 {
12856 case SFmode:
1cdc0d8f 12857 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12858 ? gen_tstsfgt_gpr (compare_result, op0, op1)
12859 : gen_cmpsfgt_gpr (compare_result, op0, op1);
37409796 12860 break;
bb8df8a6 12861
37409796 12862 case DFmode:
1cdc0d8f 12863 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12864 ? gen_tstdfgt_gpr (compare_result, op0, op1)
12865 : gen_cmpdfgt_gpr (compare_result, op0, op1);
37409796
NS
12866 break;
12867
17caeff2 12868 case TFmode:
1cdc0d8f 12869 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12870 ? gen_tsttfgt_gpr (compare_result, op0, op1)
12871 : gen_cmptfgt_gpr (compare_result, op0, op1);
17caeff2
JM
12872 break;
12873
37409796
NS
12874 default:
12875 gcc_unreachable ();
12876 }
a3170dc6 12877 break;
bb8df8a6 12878
423c1189 12879 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
12880 switch (op_mode)
12881 {
12882 case SFmode:
1cdc0d8f 12883 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12884 ? gen_tstsflt_gpr (compare_result, op0, op1)
12885 : gen_cmpsflt_gpr (compare_result, op0, op1);
37409796 12886 break;
bb8df8a6 12887
37409796 12888 case DFmode:
1cdc0d8f 12889 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12890 ? gen_tstdflt_gpr (compare_result, op0, op1)
12891 : gen_cmpdflt_gpr (compare_result, op0, op1);
37409796
NS
12892 break;
12893
17caeff2 12894 case TFmode:
1cdc0d8f 12895 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12896 ? gen_tsttflt_gpr (compare_result, op0, op1)
12897 : gen_cmptflt_gpr (compare_result, op0, op1);
17caeff2
JM
12898 break;
12899
37409796
NS
12900 default:
12901 gcc_unreachable ();
12902 }
a3170dc6 12903 break;
4d4cbc0e 12904 default:
37409796 12905 gcc_unreachable ();
a3170dc6
AH
12906 }
12907
12908 /* Synthesize LE and GE from LT/GT || EQ. */
12909 if (code == LE || code == GE || code == LEU || code == GEU)
12910 {
a3170dc6
AH
12911 emit_insn (cmp);
12912
12913 switch (code)
12914 {
12915 case LE: code = LT; break;
12916 case GE: code = GT; break;
12917 case LEU: code = LT; break;
12918 case GEU: code = GT; break;
37409796 12919 default: gcc_unreachable ();
a3170dc6
AH
12920 }
12921
a3170dc6
AH
12922 compare_result2 = gen_reg_rtx (CCFPmode);
12923
12924 /* Do the EQ. */
37409796
NS
12925 switch (op_mode)
12926 {
12927 case SFmode:
1cdc0d8f 12928 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12929 ? gen_tstsfeq_gpr (compare_result2, op0, op1)
12930 : gen_cmpsfeq_gpr (compare_result2, op0, op1);
37409796
NS
12931 break;
12932
12933 case DFmode:
1cdc0d8f 12934 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12935 ? gen_tstdfeq_gpr (compare_result2, op0, op1)
12936 : gen_cmpdfeq_gpr (compare_result2, op0, op1);
37409796
NS
12937 break;
12938
17caeff2 12939 case TFmode:
1cdc0d8f 12940 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12941 ? gen_tsttfeq_gpr (compare_result2, op0, op1)
12942 : gen_cmptfeq_gpr (compare_result2, op0, op1);
17caeff2
JM
12943 break;
12944
37409796
NS
12945 default:
12946 gcc_unreachable ();
12947 }
a3170dc6
AH
12948 emit_insn (cmp);
12949
a3170dc6 12950 /* OR them together. */
64022b5d
AH
12951 or_result = gen_reg_rtx (CCFPmode);
12952 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12953 compare_result2);
a3170dc6
AH
12954 compare_result = or_result;
12955 code = EQ;
12956 }
12957 else
12958 {
a3170dc6 12959 if (code == NE || code == LTGT)
a3170dc6 12960 code = NE;
423c1189
AH
12961 else
12962 code = EQ;
a3170dc6
AH
12963 }
12964
12965 emit_insn (cmp);
12966 }
12967 else
de17c25f
DE
12968 {
12969 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12970 CLOBBERs to match cmptf_internal2 pattern. */
12971 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
f90b7a5a 12972 && GET_MODE (op0) == TFmode
602ea4d3 12973 && !TARGET_IEEEQUAD
de17c25f
DE
12974 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
12975 emit_insn (gen_rtx_PARALLEL (VOIDmode,
12976 gen_rtvec (9,
12977 gen_rtx_SET (VOIDmode,
12978 compare_result,
f90b7a5a 12979 gen_rtx_COMPARE (comp_mode, op0, op1)),
de17c25f
DE
12980 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12981 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12982 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12983 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12984 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12985 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12986 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12987 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
f90b7a5a
PB
12988 else if (GET_CODE (op1) == UNSPEC
12989 && XINT (op1, 1) == UNSPEC_SP_TEST)
3aebbe5f 12990 {
c24151ce 12991 rtx op1b = XVECEXP (op1, 0, 0);
3aebbe5f
JJ
12992 comp_mode = CCEQmode;
12993 compare_result = gen_reg_rtx (CCEQmode);
12994 if (TARGET_64BIT)
c24151ce 12995 emit_insn (gen_stack_protect_testdi (compare_result, op0, op1b));
3aebbe5f 12996 else
c24151ce 12997 emit_insn (gen_stack_protect_testsi (compare_result, op0, op1b));
3aebbe5f 12998 }
de17c25f
DE
12999 else
13000 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
f90b7a5a 13001 gen_rtx_COMPARE (comp_mode, op0, op1)));
de17c25f 13002 }
f676971a 13003
ca5adc63 13004 /* Some kinds of FP comparisons need an OR operation;
e7108df9 13005 under flag_finite_math_only we don't bother. */
f90b7a5a 13006 if (FLOAT_MODE_P (mode)
e7108df9 13007 && !flag_finite_math_only
8ef65e3d 13008 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
13009 && (code == LE || code == GE
13010 || code == UNEQ || code == LTGT
13011 || code == UNGT || code == UNLT))
13012 {
13013 enum rtx_code or1, or2;
13014 rtx or1_rtx, or2_rtx, compare2_rtx;
13015 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 13016
39a10a29
GK
13017 switch (code)
13018 {
13019 case LE: or1 = LT; or2 = EQ; break;
13020 case GE: or1 = GT; or2 = EQ; break;
13021 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
13022 case LTGT: or1 = LT; or2 = GT; break;
13023 case UNGT: or1 = UNORDERED; or2 = GT; break;
13024 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 13025 default: gcc_unreachable ();
39a10a29
GK
13026 }
13027 validate_condition_mode (or1, comp_mode);
13028 validate_condition_mode (or2, comp_mode);
1c563bed
KH
13029 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
13030 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
13031 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
13032 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
13033 const_true_rtx);
13034 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
13035
13036 compare_result = or_result;
13037 code = EQ;
13038 }
13039
13040 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 13041
1c563bed 13042 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
13043}
13044
13045
13046/* Emit the RTL for an sCOND pattern. */
13047
13048void
f90b7a5a 13049rs6000_emit_sCOND (enum machine_mode mode, rtx operands[])
39a10a29
GK
13050{
13051 rtx condition_rtx;
13052 enum machine_mode op_mode;
b7053a3f 13053 enum rtx_code cond_code;
f90b7a5a 13054 rtx result = operands[0];
39a10a29 13055
f90b7a5a 13056 condition_rtx = rs6000_generate_compare (operands[1], mode);
b7053a3f
GK
13057 cond_code = GET_CODE (condition_rtx);
13058
f90b7a5a 13059 if (FLOAT_MODE_P (mode)
423c1189
AH
13060 && !TARGET_FPRS && TARGET_HARD_FLOAT)
13061 {
13062 rtx t;
13063
13064 PUT_MODE (condition_rtx, SImode);
13065 t = XEXP (condition_rtx, 0);
13066
37409796 13067 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
13068
13069 if (cond_code == NE)
64022b5d 13070 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 13071
64022b5d 13072 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
13073 return;
13074 }
13075
b7053a3f
GK
13076 if (cond_code == NE
13077 || cond_code == GE || cond_code == LE
13078 || cond_code == GEU || cond_code == LEU
13079 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
13080 {
13081 rtx not_result = gen_reg_rtx (CCEQmode);
13082 rtx not_op, rev_cond_rtx;
13083 enum machine_mode cc_mode;
f676971a 13084
b7053a3f
GK
13085 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
13086
1c563bed 13087 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 13088 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
13089 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
13090 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
13091 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
13092 }
39a10a29 13093
f90b7a5a 13094 op_mode = GET_MODE (XEXP (operands[1], 0));
39a10a29 13095 if (op_mode == VOIDmode)
f90b7a5a 13096 op_mode = GET_MODE (XEXP (operands[1], 1));
39a10a29 13097
f90b7a5a 13098 if (TARGET_POWERPC64 && (op_mode == DImode || FLOAT_MODE_P (mode)))
39a10a29
GK
13099 {
13100 PUT_MODE (condition_rtx, DImode);
13101 convert_move (result, condition_rtx, 0);
13102 }
13103 else
13104 {
13105 PUT_MODE (condition_rtx, SImode);
13106 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
13107 }
13108}
13109
39a10a29
GK
13110/* Emit a branch of kind CODE to location LOC. */
13111
13112void
f90b7a5a 13113rs6000_emit_cbranch (enum machine_mode mode, rtx operands[])
39a10a29
GK
13114{
13115 rtx condition_rtx, loc_ref;
13116
f90b7a5a
PB
13117 condition_rtx = rs6000_generate_compare (operands[0], mode);
13118 loc_ref = gen_rtx_LABEL_REF (VOIDmode, operands[3]);
39a10a29
GK
13119 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
13120 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
13121 loc_ref, pc_rtx)));
13122}
13123
12a4e8c5
GK
13124/* Return the string to output a conditional branch to LABEL, which is
13125 the assembler text of the label operand, or NULL if the branch is really a
f676971a 13126 conditional return.
12a4e8c5
GK
13127
13128 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
13129 condition code register and its mode specifies what kind of
13130 comparison we made.
13131
a0ab749a 13132 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
13133
13134 INSN is the insn. */
13135
13136char *
a2369ed3 13137output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
13138{
13139 static char string[64];
13140 enum rtx_code code = GET_CODE (op);
13141 rtx cc_reg = XEXP (op, 0);
13142 enum machine_mode mode = GET_MODE (cc_reg);
13143 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 13144 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
13145 int really_reversed = reversed ^ need_longbranch;
13146 char *s = string;
13147 const char *ccode;
13148 const char *pred;
13149 rtx note;
13150
39a10a29
GK
13151 validate_condition_mode (code, mode);
13152
 13153   /* Work out which way this really branches.  We could always use
 13154      reverse_condition_maybe_unordered here, but using plain
 13155      reverse_condition for non-FP modes makes the resulting assembler clearer.  */
12a4e8c5 13156 if (really_reversed)
de40e1df
DJ
13157 {
13158 /* Reversal of FP compares takes care -- an ordered compare
13159 becomes an unordered compare and vice versa. */
13160 if (mode == CCFPmode)
13161 code = reverse_condition_maybe_unordered (code);
13162 else
13163 code = reverse_condition (code);
13164 }
12a4e8c5 13165
8ef65e3d 13166 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
13167 {
13168 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
13169 to the GT bit. */
37409796
NS
13170 switch (code)
13171 {
13172 case EQ:
13173 /* Opposite of GT. */
13174 code = GT;
13175 break;
13176
13177 case NE:
13178 code = UNLE;
13179 break;
13180
13181 default:
13182 gcc_unreachable ();
13183 }
a3170dc6
AH
13184 }
13185
39a10a29 13186 switch (code)
12a4e8c5
GK
13187 {
13188 /* Not all of these are actually distinct opcodes, but
13189 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
13190 case NE: case LTGT:
13191 ccode = "ne"; break;
13192 case EQ: case UNEQ:
13193 ccode = "eq"; break;
f676971a 13194 case GE: case GEU:
50a0b056 13195 ccode = "ge"; break;
f676971a 13196 case GT: case GTU: case UNGT:
50a0b056 13197 ccode = "gt"; break;
f676971a 13198 case LE: case LEU:
50a0b056 13199 ccode = "le"; break;
f676971a 13200 case LT: case LTU: case UNLT:
50a0b056 13201 ccode = "lt"; break;
12a4e8c5
GK
13202 case UNORDERED: ccode = "un"; break;
13203 case ORDERED: ccode = "nu"; break;
13204 case UNGE: ccode = "nl"; break;
13205 case UNLE: ccode = "ng"; break;
13206 default:
37409796 13207 gcc_unreachable ();
12a4e8c5 13208 }
f676971a
EC
13209
13210 /* Maybe we have a guess as to how likely the branch is.
94a54f47 13211 The old mnemonics don't have a way to specify this information. */
f4857b9b 13212 pred = "";
12a4e8c5
GK
13213 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
13214 if (note != NULL_RTX)
13215 {
13216 /* PROB is the difference from 50%. */
13217 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
13218
13219 /* Only hint for highly probable/improbable branches on newer
13220 cpus as static prediction overrides processor dynamic
13221 prediction. For older cpus we may as well always hint, but
13222 assume not taken for branches that are very close to 50% as a
13223 mispredicted taken branch is more expensive than a
f676971a 13224 mispredicted not-taken branch. */
ec507f2d 13225 if (rs6000_always_hint
2c9e13f3
JH
13226 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
13227 && br_prob_note_reliable_p (note)))
f4857b9b
AM
13228 {
13229 if (abs (prob) > REG_BR_PROB_BASE / 20
13230 && ((prob > 0) ^ need_longbranch))
c4ad648e 13231 pred = "+";
f4857b9b
AM
13232 else
13233 pred = "-";
13234 }
12a4e8c5 13235 }
12a4e8c5
GK
13236
13237 if (label == NULL)
94a54f47 13238 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 13239 else
94a54f47 13240 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 13241
37c67319 13242 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 13243 Assume they'd only be the first character.... */
37c67319
GK
13244 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
13245 *s++ = '%';
94a54f47 13246 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
13247
13248 if (label != NULL)
13249 {
13250 /* If the branch distance was too far, we may have to use an
13251 unconditional branch to go the distance. */
13252 if (need_longbranch)
44518ddd 13253 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
13254 else
13255 s += sprintf (s, ",%s", label);
13256 }
13257
13258 return string;
13259}
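/* Editor's note -- worked example (label and register spellings assumed):
   an EQ test in CR0 that is within conditional-branch range and predicted
   taken comes out, with the new mnemonics, as

       beq+ 0,.L123

   while a branch whose target is out of range is reversed and emitted as
   the two-instruction sequence built by the need_longbranch path above
   (possibly with a +/- hint):

       bne 0,$+8
       b .L123
 */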
50a0b056 13260
64022b5d 13261/* Return the string to flip the GT bit on a CR. */
423c1189 13262char *
64022b5d 13263output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
13264{
13265 static char string[64];
13266 int a, b;
13267
37409796
NS
13268 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
13269 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 13270
64022b5d
AH
13271 /* GT bit. */
13272 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
13273 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
13274
13275 sprintf (string, "crnot %d,%d", a, b);
13276 return string;
13277}
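/* Editor's note -- worked example: for dst == src == CR6 the GT bit is
   bit 1 of that CR field, so a == b == 4 * 6 + 1 == 25 and the routine
   returns "crnot 25,25", i.e. the GT bit of CR6 is inverted in place.  */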
13278
21213b4c
DP
 13279/* Return the insn index (unspec number) of the vector compare instruction
 13280   for the given CODE, DEST_MODE and OP_MODE.  Return INSN_NOT_AVAILABLE
 13281   if no suitable insn is available.  */
13282
13283static int
94ff898d 13284get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
13285 enum machine_mode dest_mode,
13286 enum machine_mode op_mode)
13287{
13288 if (!TARGET_ALTIVEC)
13289 return INSN_NOT_AVAILABLE;
13290
13291 switch (code)
13292 {
13293 case EQ:
13294 if (dest_mode == V16QImode && op_mode == V16QImode)
13295 return UNSPEC_VCMPEQUB;
13296 if (dest_mode == V8HImode && op_mode == V8HImode)
13297 return UNSPEC_VCMPEQUH;
13298 if (dest_mode == V4SImode && op_mode == V4SImode)
13299 return UNSPEC_VCMPEQUW;
13300 if (dest_mode == V4SImode && op_mode == V4SFmode)
13301 return UNSPEC_VCMPEQFP;
13302 break;
13303 case GE:
13304 if (dest_mode == V4SImode && op_mode == V4SFmode)
13305 return UNSPEC_VCMPGEFP;
	  /* Integer GE has no single AltiVec compare insn; return
	     INSN_NOT_AVAILABLE so the caller falls back to the GT-or-EQ
	     sequence rather than falling through to the GT cases.  */
	  break;
 13306    case GT:
13307 if (dest_mode == V16QImode && op_mode == V16QImode)
13308 return UNSPEC_VCMPGTSB;
13309 if (dest_mode == V8HImode && op_mode == V8HImode)
13310 return UNSPEC_VCMPGTSH;
13311 if (dest_mode == V4SImode && op_mode == V4SImode)
13312 return UNSPEC_VCMPGTSW;
13313 if (dest_mode == V4SImode && op_mode == V4SFmode)
13314 return UNSPEC_VCMPGTFP;
13315 break;
13316 case GTU:
13317 if (dest_mode == V16QImode && op_mode == V16QImode)
13318 return UNSPEC_VCMPGTUB;
13319 if (dest_mode == V8HImode && op_mode == V8HImode)
13320 return UNSPEC_VCMPGTUH;
13321 if (dest_mode == V4SImode && op_mode == V4SImode)
13322 return UNSPEC_VCMPGTUW;
13323 break;
13324 default:
13325 break;
13326 }
13327 return INSN_NOT_AVAILABLE;
13328}
13329
13330/* Emit vector compare for operands OP0 and OP1 using code RCODE.
13331 DMODE is expected destination mode. This is a recursive function. */
13332
13333static rtx
13334rs6000_emit_vector_compare (enum rtx_code rcode,
13335 rtx op0, rtx op1,
13336 enum machine_mode dmode)
13337{
13338 int vec_cmp_insn;
13339 rtx mask;
13340 enum machine_mode dest_mode;
13341 enum machine_mode op_mode = GET_MODE (op1);
13342
37409796
NS
13343 gcc_assert (TARGET_ALTIVEC);
13344 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
13345
 13346  /* Floating point vector compare instructions use a V4SImode destination.
 13347     Move the result to the appropriate mode later.  */
13348 if (dmode == V4SFmode)
13349 dest_mode = V4SImode;
13350 else
13351 dest_mode = dmode;
13352
13353 mask = gen_reg_rtx (dest_mode);
13354 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
13355
13356 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
13357 {
13358 bool swap_operands = false;
13359 bool try_again = false;
13360 switch (rcode)
13361 {
13362 case LT:
13363 rcode = GT;
13364 swap_operands = true;
13365 try_again = true;
13366 break;
13367 case LTU:
13368 rcode = GTU;
13369 swap_operands = true;
13370 try_again = true;
13371 break;
13372 case NE:
370df7db
JC
13373 case UNLE:
13374 case UNLT:
13375 case UNGE:
13376 case UNGT:
13377 /* Invert condition and try again.
13378 e.g., A != B becomes ~(A==B). */
21213b4c 13379 {
370df7db 13380 enum rtx_code rev_code;
21213b4c 13381 enum insn_code nor_code;
d1123cde 13382 rtx eq_rtx;
370df7db
JC
13383
13384 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
13385 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
13386 dest_mode);
94ff898d 13387
166cdb08 13388 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 13389 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
13390 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
13391
13392 if (dmode != dest_mode)
13393 {
13394 rtx temp = gen_reg_rtx (dest_mode);
13395 convert_move (temp, mask, 0);
13396 return temp;
13397 }
13398 return mask;
13399 }
13400 break;
13401 case GE:
13402 case GEU:
13403 case LE:
13404 case LEU:
13405 /* Try GT/GTU/LT/LTU OR EQ */
13406 {
13407 rtx c_rtx, eq_rtx;
13408 enum insn_code ior_code;
13409 enum rtx_code new_code;
13410
37409796
NS
13411 switch (rcode)
13412 {
13413 case GE:
13414 new_code = GT;
13415 break;
13416
13417 case GEU:
13418 new_code = GTU;
13419 break;
13420
13421 case LE:
13422 new_code = LT;
13423 break;
13424
13425 case LEU:
13426 new_code = LTU;
13427 break;
13428
13429 default:
13430 gcc_unreachable ();
13431 }
21213b4c
DP
13432
13433 c_rtx = rs6000_emit_vector_compare (new_code,
13434 op0, op1, dest_mode);
13435 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
13436 dest_mode);
13437
166cdb08 13438 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 13439 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
13440 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
13441 if (dmode != dest_mode)
13442 {
13443 rtx temp = gen_reg_rtx (dest_mode);
13444 convert_move (temp, mask, 0);
13445 return temp;
13446 }
13447 return mask;
13448 }
13449 break;
13450 default:
37409796 13451 gcc_unreachable ();
21213b4c
DP
13452 }
13453
13454 if (try_again)
13455 {
13456 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
13457 /* You only get two chances. */
13458 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
13459 }
13460
13461 if (swap_operands)
13462 {
13463 rtx tmp;
13464 tmp = op0;
13465 op0 = op1;
13466 op1 = tmp;
13467 }
13468 }
13469
915167f5
GK
13470 emit_insn (gen_rtx_SET (VOIDmode, mask,
13471 gen_rtx_UNSPEC (dest_mode,
13472 gen_rtvec (2, op0, op1),
13473 vec_cmp_insn)));
21213b4c
DP
13474 if (dmode != dest_mode)
13475 {
13476 rtx temp = gen_reg_rtx (dest_mode);
13477 convert_move (temp, mask, 0);
13478 return temp;
13479 }
13480 return mask;
13481}
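/* Editor's note -- illustrative sketch of the fallback path above, with
   assumed register numbering: a V4SImode signed LE compare of a,b has no
   single AltiVec instruction, so it is built as LT (i.e. GT with the
   operands swapped) ORed with EQ, roughly

       vcmpgtsw %v0,%v2,%v1      ; b > a, i.e. a < b
       vcmpequw %v3,%v1,%v2      ; a == b
       vor      %v4,%v0,%v3      ; a <= b

   which is what the GE/GEU/LE/LEU case of the switch produces via two
   recursive calls and ior_optab.  */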
13482
 13483/* Return the vector select instruction for MODE.  Return INSN_NOT_AVAILABLE
 13484   if no valid insn exists for the given mode.  */
13485
13486static int
13487get_vsel_insn (enum machine_mode mode)
13488{
13489 switch (mode)
13490 {
13491 case V4SImode:
13492 return UNSPEC_VSEL4SI;
13493 break;
13494 case V4SFmode:
13495 return UNSPEC_VSEL4SF;
13496 break;
13497 case V8HImode:
13498 return UNSPEC_VSEL8HI;
13499 break;
13500 case V16QImode:
13501 return UNSPEC_VSEL16QI;
13502 break;
13503 default:
13504 return INSN_NOT_AVAILABLE;
13505 break;
13506 }
13507 return INSN_NOT_AVAILABLE;
13508}
13509
13510/* Emit vector select insn where DEST is destination using
13511 operands OP1, OP2 and MASK. */
13512
13513static void
13514rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
13515{
13516 rtx t, temp;
13517 enum machine_mode dest_mode = GET_MODE (dest);
13518 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
13519
13520 temp = gen_reg_rtx (dest_mode);
94ff898d 13521
bb8df8a6 13522  /* For each vector element, select op1 when the mask is 1, otherwise
19f1ebc7 13523     select op2.  */
915167f5
GK
13524 t = gen_rtx_SET (VOIDmode, temp,
13525 gen_rtx_UNSPEC (dest_mode,
13526 gen_rtvec (3, op2, op1, mask),
13527 vsel_insn_index));
21213b4c
DP
13528 emit_insn (t);
13529 emit_move_insn (dest, temp);
13530 return;
13531}
13532
94ff898d 13533/* Emit vector conditional expression.
21213b4c
DP
13534 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
13535 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
13536
13537int
13538rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
13539 rtx cond, rtx cc_op0, rtx cc_op1)
13540{
13541 enum machine_mode dest_mode = GET_MODE (dest);
13542 enum rtx_code rcode = GET_CODE (cond);
13543 rtx mask;
13544
13545 if (!TARGET_ALTIVEC)
13546 return 0;
13547
13548 /* Get the vector mask for the given relational operations. */
13549 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
13550
13551 rs6000_emit_vector_select (dest, op1, op2, mask);
13552
13553 return 1;
13554}
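/* Editor's note -- illustrative sketch (pattern name and operand layout
   assumed): the vcond/vcondu expanders in the AltiVec machine description
   would use this routine along the lines of

       if (rs6000_emit_vector_cond_expr (operands[0], operands[1],
					 operands[2], operands[3],
					 operands[4], operands[5]))
	 DONE;
       else
	 FAIL;

   i.e. operands[3] is the comparison, operands[4]/operands[5] its
   arguments, and operands[1]/operands[2] the two selected values.  */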
13555
50a0b056
GK
13556/* Emit a conditional move: move TRUE_COND to DEST if OP of the
13557 operands of the last comparison is nonzero/true, FALSE_COND if it
13558 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 13559
50a0b056 13560int
a2369ed3 13561rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
13562{
13563 enum rtx_code code = GET_CODE (op);
f90b7a5a
PB
13564 rtx op0 = XEXP (op, 0);
13565 rtx op1 = XEXP (op, 1);
50a0b056 13566 REAL_VALUE_TYPE c1;
3148ad6d
DJ
13567 enum machine_mode compare_mode = GET_MODE (op0);
13568 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 13569 rtx temp;
add2402e 13570 bool is_against_zero;
50a0b056 13571
a3c9585f 13572 /* These modes should always match. */
a3170dc6
AH
13573 if (GET_MODE (op1) != compare_mode
13574 /* In the isel case however, we can use a compare immediate, so
13575 op1 may be a small constant. */
13576 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 13577 return 0;
178c3eff 13578 if (GET_MODE (true_cond) != result_mode)
3148ad6d 13579 return 0;
178c3eff 13580 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
13581 return 0;
13582
50a0b056 13583 /* First, work out if the hardware can do this at all, or
a3c9585f 13584 if it's too slow.... */
f90b7a5a 13585 if (!FLOAT_MODE_P (compare_mode))
a3170dc6
AH
13586 {
13587 if (TARGET_ISEL)
13588 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13589 return 0;
13590 }
8ef65e3d 13591 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 13592 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 13593 return 0;
50a0b056 13594
add2402e 13595 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 13596
add2402e
GK
13597 /* A floating-point subtract might overflow, underflow, or produce
13598 an inexact result, thus changing the floating-point flags, so it
13599 can't be generated if we care about that. It's safe if one side
13600 of the construct is zero, since then no subtract will be
13601 generated. */
ebb109ad 13602 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
13603 && flag_trapping_math && ! is_against_zero)
13604 return 0;
13605
50a0b056
GK
13606 /* Eliminate half of the comparisons by switching operands, this
13607 makes the remaining code simpler. */
13608 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 13609 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
13610 {
13611 code = reverse_condition_maybe_unordered (code);
13612 temp = true_cond;
13613 true_cond = false_cond;
13614 false_cond = temp;
13615 }
13616
13617 /* UNEQ and LTGT take four instructions for a comparison with zero,
13618 it'll probably be faster to use a branch here too. */
bc9ec0e0 13619 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 13620 return 0;
f676971a 13621
50a0b056
GK
13622 if (GET_CODE (op1) == CONST_DOUBLE)
13623 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 13624
b6d08ca1 13625 /* We're going to try to implement comparisons by performing
50a0b056
GK
13626 a subtract, then comparing against zero. Unfortunately,
13627 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 13628 know that the operand is finite and the comparison
50a0b056 13629 would treat EQ different to UNORDERED, we can't do it. */
bc9ec0e0 13630 if (HONOR_INFINITIES (compare_mode)
50a0b056 13631 && code != GT && code != UNGE
045572c7 13632 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
13633 /* Constructs of the form (a OP b ? a : b) are safe. */
13634 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 13635 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
13636 && ! rtx_equal_p (op1, true_cond))))
13637 return 0;
add2402e 13638
50a0b056
GK
13639 /* At this point we know we can use fsel. */
13640
13641 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
13642 if (! is_against_zero)
13643 {
13644 temp = gen_reg_rtx (compare_mode);
13645 emit_insn (gen_rtx_SET (VOIDmode, temp,
13646 gen_rtx_MINUS (compare_mode, op0, op1)));
13647 op0 = temp;
13648 op1 = CONST0_RTX (compare_mode);
13649 }
50a0b056
GK
13650
13651 /* If we don't care about NaNs we can reduce some of the comparisons
13652 down to faster ones. */
bc9ec0e0 13653 if (! HONOR_NANS (compare_mode))
50a0b056
GK
13654 switch (code)
13655 {
13656 case GT:
13657 code = LE;
13658 temp = true_cond;
13659 true_cond = false_cond;
13660 false_cond = temp;
13661 break;
13662 case UNGE:
13663 code = GE;
13664 break;
13665 case UNEQ:
13666 code = EQ;
13667 break;
13668 default:
13669 break;
13670 }
13671
13672 /* Now, reduce everything down to a GE. */
13673 switch (code)
13674 {
13675 case GE:
13676 break;
13677
13678 case LE:
3148ad6d
DJ
13679 temp = gen_reg_rtx (compare_mode);
13680 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13681 op0 = temp;
13682 break;
13683
13684 case ORDERED:
3148ad6d
DJ
13685 temp = gen_reg_rtx (compare_mode);
13686 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
13687 op0 = temp;
13688 break;
13689
13690 case EQ:
3148ad6d 13691 temp = gen_reg_rtx (compare_mode);
f676971a 13692 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
13693 gen_rtx_NEG (compare_mode,
13694 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
13695 op0 = temp;
13696 break;
13697
13698 case UNGE:
bc9ec0e0 13699 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 13700 temp = gen_reg_rtx (result_mode);
50a0b056 13701 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 13702 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13703 gen_rtx_GE (VOIDmode,
13704 op0, op1),
13705 true_cond, false_cond)));
bc9ec0e0
GK
13706 false_cond = true_cond;
13707 true_cond = temp;
50a0b056 13708
3148ad6d
DJ
13709 temp = gen_reg_rtx (compare_mode);
13710 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13711 op0 = temp;
13712 break;
13713
13714 case GT:
bc9ec0e0 13715 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 13716 temp = gen_reg_rtx (result_mode);
50a0b056 13717 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 13718 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13719 gen_rtx_GE (VOIDmode,
13720 op0, op1),
13721 true_cond, false_cond)));
bc9ec0e0
GK
13722 true_cond = false_cond;
13723 false_cond = temp;
50a0b056 13724
3148ad6d
DJ
13725 temp = gen_reg_rtx (compare_mode);
13726 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13727 op0 = temp;
13728 break;
13729
13730 default:
37409796 13731 gcc_unreachable ();
50a0b056
GK
13732 }
13733
13734 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 13735 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13736 gen_rtx_GE (VOIDmode,
13737 op0, op1),
13738 true_cond, false_cond)));
13739 return 1;
13740}
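/* Editor's note -- illustrative sketch with assumed register names: when
   the fsel path is taken for "dest = (a >= b) ? x : y" on DFmode values
   (a subtract being allowed because !flag_trapping_math or b is zero, and
   the NaN/infinity checks above permit it), the comparison is reduced to
   a compare against zero and the closing if_then_else matches the fsel
   pattern, giving roughly

       fsub f0,f_a,f_b
       fsel f_dest,f0,f_x,f_y     ; f_dest = (f0 >= 0.0) ? f_x : f_y
 */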
13741
a3170dc6
AH
13742/* Same as above, but for ints (isel). */
13743
13744static int
a2369ed3 13745rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
13746{
13747 rtx condition_rtx, cr;
13748
13749 /* All isel implementations thus far are 32-bits. */
f90b7a5a 13750 if (GET_MODE (XEXP (op, 0)) != SImode)
a3170dc6
AH
13751 return 0;
13752
13753 /* We still have to do the compare, because isel doesn't do a
13754 compare, it just looks at the CRx bits set by a previous compare
13755 instruction. */
f90b7a5a 13756 condition_rtx = rs6000_generate_compare (op, SImode);
a3170dc6
AH
13757 cr = XEXP (condition_rtx, 0);
13758
13759 if (GET_MODE (cr) == CCmode)
13760 emit_insn (gen_isel_signed (dest, condition_rtx,
13761 true_cond, false_cond, cr));
13762 else
13763 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13764 true_cond, false_cond, cr));
13765
13766 return 1;
13767}
13768
13769const char *
a2369ed3 13770output_isel (rtx *operands)
a3170dc6
AH
13771{
13772 enum rtx_code code;
13773
13774 code = GET_CODE (operands[1]);
13775 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13776 {
13777 PUT_CODE (operands[1], reverse_condition (code));
13778 return "isel %0,%3,%2,%j1";
13779 }
13780 else
13781 return "isel %0,%2,%3,%j1";
13782}
13783
50a0b056 13784void
a2369ed3 13785rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13786{
13787 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13788 enum rtx_code c;
50a0b056 13789 rtx target;
5dc8d536
AH
13790
13791 if (code == SMAX || code == SMIN)
13792 c = GE;
13793 else
13794 c = GEU;
13795
50a0b056 13796 if (code == SMAX || code == UMAX)
f676971a 13797 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13798 op0, op1, mode, 0);
13799 else
f676971a 13800 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13801 op1, op0, mode, 0);
37409796 13802 gcc_assert (target);
50a0b056
GK
13803 if (target != dest)
13804 emit_move_insn (dest, target);
13805}
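/* Editor's note -- illustrative sketch (expander shape assumed): an
   smax/smin/umax/umin expander would call this as, e.g.,

       rs6000_emit_minmax (operands[0], SMAX, operands[1], operands[2]);
       DONE;

   Internally SMAX/SMIN map to a GE conditional move and UMAX/UMIN to GEU,
   so floating-point modes end up in the fsel sequence produced by
   rs6000_emit_cmove above, while integer modes rely on isel via
   rs6000_emit_int_cmove.  */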
46c07df8 13806
915167f5
GK
13807/* Emit instructions to perform a load-reserved/store-conditional operation.
13808 The operation performed is an atomic
13809 (set M (CODE:MODE M OP))
13810 If not NULL, BEFORE is atomically set to M before the operation, and
13811 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13812 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13813 Either OP or M may be wrapped in a NOT operation. */
13814
13815void
13816rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13817 rtx m, rtx op, rtx before_param, rtx after_param,
13818 bool sync_p)
13819{
13820 enum machine_mode used_mode;
13821 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13822 rtx used_m;
13823 rtvec vec;
13824 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13825 rtx shift = NULL_RTX;
bb8df8a6 13826
915167f5 13827 if (sync_p)
a3cb4b82 13828 emit_insn (gen_lwsync ());
bb8df8a6 13829
915167f5
GK
13830 used_m = m;
13831
13832 /* If this is smaller than SImode, we'll have to use SImode with
13833 adjustments. */
13834 if (mode == QImode || mode == HImode)
13835 {
13836 rtx newop, oldop;
13837
13838 if (MEM_ALIGN (used_m) >= 32)
13839 {
13840 int ishift = 0;
13841 if (BYTES_BIG_ENDIAN)
13842 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13843
915167f5 13844 shift = GEN_INT (ishift);
c75c6d11 13845 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13846 }
13847 else
13848 {
13849 rtx addrSI, aligned_addr;
a9c9d3fa 13850 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13851
c75c6d11
JJ
13852 addrSI = gen_lowpart_common (SImode,
13853 force_reg (Pmode, XEXP (used_m, 0)));
13854 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13855 shift = gen_reg_rtx (SImode);
13856
13857 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13858 GEN_INT (shift_mask)));
13859 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
13860
13861 aligned_addr = expand_binop (Pmode, and_optab,
13862 XEXP (used_m, 0),
13863 GEN_INT (-4), NULL_RTX,
13864 1, OPTAB_LIB_WIDEN);
13865 used_m = change_address (used_m, SImode, aligned_addr);
13866 set_mem_align (used_m, 32);
915167f5 13867 }
c75c6d11
JJ
13868 /* It's safe to keep the old alias set of USED_M, because
13869 the operation is atomic and only affects the original
13870 USED_M. */
f66f9865 13871 m = used_m;
915167f5
GK
13872
13873 if (GET_CODE (op) == NOT)
13874 {
13875 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13876 oldop = gen_rtx_NOT (SImode, oldop);
13877 }
13878 else
13879 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13880
915167f5
GK
13881 switch (code)
13882 {
13883 case IOR:
13884 case XOR:
13885 newop = expand_binop (SImode, and_optab,
13886 oldop, GEN_INT (imask), NULL_RTX,
13887 1, OPTAB_LIB_WIDEN);
13888 emit_insn (gen_ashlsi3 (newop, newop, shift));
13889 break;
13890
f66f9865
DE
13891 case NOT: /* NAND */
13892 newop = expand_binop (SImode, ior_optab,
13893 oldop, GEN_INT (~imask), NULL_RTX,
13894 1, OPTAB_LIB_WIDEN);
13895 emit_insn (gen_rotlsi3 (newop, newop, shift));
13896 break;
13897
915167f5
GK
13898 case AND:
13899 newop = expand_binop (SImode, ior_optab,
13900 oldop, GEN_INT (~imask), NULL_RTX,
13901 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13902 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13903 break;
13904
13905 case PLUS:
9f0076e5 13906 case MINUS:
915167f5
GK
13907 {
13908 rtx mask;
bb8df8a6 13909
915167f5
GK
13910 newop = expand_binop (SImode, and_optab,
13911 oldop, GEN_INT (imask), NULL_RTX,
13912 1, OPTAB_LIB_WIDEN);
13913 emit_insn (gen_ashlsi3 (newop, newop, shift));
13914
13915 mask = gen_reg_rtx (SImode);
13916 emit_move_insn (mask, GEN_INT (imask));
13917 emit_insn (gen_ashlsi3 (mask, mask, shift));
13918
9f0076e5
DE
13919 if (code == PLUS)
13920 newop = gen_rtx_PLUS (SImode, m, newop);
13921 else
13922 newop = gen_rtx_MINUS (SImode, m, newop);
13923 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13924 newop = gen_rtx_IOR (SImode, newop,
13925 gen_rtx_AND (SImode,
13926 gen_rtx_NOT (SImode, mask),
13927 m));
13928 break;
13929 }
13930
13931 default:
13932 gcc_unreachable ();
13933 }
13934
13935 op = newop;
13936 used_mode = SImode;
13937 before = gen_reg_rtx (used_mode);
13938 after = gen_reg_rtx (used_mode);
13939 }
13940 else
13941 {
13942 used_mode = mode;
13943 before = before_param;
13944 after = after_param;
13945
13946 if (before == NULL_RTX)
13947 before = gen_reg_rtx (used_mode);
13948 if (after == NULL_RTX)
13949 after = gen_reg_rtx (used_mode);
13950 }
bb8df8a6 13951
f66f9865 13952 if ((code == PLUS || code == MINUS)
9f0076e5 13953 && used_mode != mode)
915167f5
GK
13954 the_op = op; /* Computed above. */
13955 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13956 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
f66f9865
DE
13957 else if (code == NOT)
13958 the_op = gen_rtx_fmt_ee (IOR, used_mode,
13959 gen_rtx_NOT (used_mode, m),
13960 gen_rtx_NOT (used_mode, op));
915167f5
GK
13961 else
13962 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13963
13964 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13965 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13966 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
13967 gen_rtx_UNSPEC (used_mode,
13968 gen_rtvec (1, the_op),
13969 UNSPEC_SYNC_OP));
915167f5
GK
13970 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
13971
9f0076e5 13972 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
13973 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
13974 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
13975 else
13976 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
13977 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
13978
13979 /* Shift and mask the return values properly. */
13980 if (used_mode != mode && before_param)
13981 {
13982 emit_insn (gen_lshrsi3 (before, before, shift));
13983 convert_move (before_param, before, 1);
13984 }
13985
13986 if (used_mode != mode && after_param)
13987 {
13988 emit_insn (gen_lshrsi3 (after, after, shift));
13989 convert_move (after_param, after, 1);
13990 }
13991
13992 /* The previous sequence will end with a branch that's dependent on
13993 the conditional store, so placing an isync will ensure that no
13994 other instructions (especially, no load or store instructions)
13995 can start before the atomic operation completes. */
13996 if (sync_p)
13997 emit_insn (gen_isync ());
13998}
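/* Editor's note -- worked example of the subword masking above, assuming
   big-endian layout: an atomic QImode AND with value 0x0f on the byte at
   offset 1 of its aligned word gets shift = ((1*8) & 0x18) ^ 0x18 = 16.
   The operand is widened as (0x0f | ~0xff) rotated left by 16, i.e.
   0xff0fffff, so AND-ing the whole SImode word applies 0x0f to bits
   16..23 only and leaves every other byte unchanged.  */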
13999
b52110d4
DE
14000/* A subroutine of the atomic operation splitters. Jump to LABEL if
14001 COND is true. Mark the jump as unlikely to be taken. */
14002
14003static void
14004emit_unlikely_jump (rtx cond, rtx label)
14005{
14006 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
14007 rtx x;
14008
14009 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
14010 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
bbbbb16a 14011 add_reg_note (x, REG_BR_PROB, very_unlikely);
b52110d4
DE
14012}
14013
14014/* A subroutine of the atomic operation splitters. Emit a load-locked
14015 instruction in MODE. */
14016
14017static void
14018emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
14019{
14020 rtx (*fn) (rtx, rtx) = NULL;
14021 if (mode == SImode)
14022 fn = gen_load_locked_si;
14023 else if (mode == DImode)
14024 fn = gen_load_locked_di;
14025 emit_insn (fn (reg, mem));
14026}
14027
14028/* A subroutine of the atomic operation splitters. Emit a store-conditional
14029 instruction in MODE. */
14030
14031static void
14032emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
14033{
14034 rtx (*fn) (rtx, rtx, rtx) = NULL;
14035 if (mode == SImode)
14036 fn = gen_store_conditional_si;
14037 else if (mode == DImode)
14038 fn = gen_store_conditional_di;
14039
9f0076e5 14040 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
14041 if (PPC405_ERRATUM77)
14042 emit_insn (gen_memory_barrier ());
14043
14044 emit_insn (fn (res, mem, val));
14045}
14046
ea2c620c 14047/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 14048 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
14049 operand of the binary operator. BEFORE and AFTER are optional locations to
14050 return the value of MEM either before of after the operation. SCRATCH is
14051 a scratch register. */
14052
14053void
14054rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
14055 rtx before, rtx after, rtx scratch)
14056{
14057 enum machine_mode mode = GET_MODE (mem);
14058 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14059
a3cb4b82 14060 emit_insn (gen_lwsync ());
9f0076e5
DE
14061
14062 label = gen_label_rtx ();
14063 emit_label (label);
14064 label = gen_rtx_LABEL_REF (VOIDmode, label);
14065
14066 if (before == NULL_RTX)
14067 before = scratch;
14068 emit_load_locked (mode, before, mem);
14069
14070 if (code == NOT)
f66f9865
DE
14071 x = gen_rtx_IOR (mode,
14072 gen_rtx_NOT (mode, before),
14073 gen_rtx_NOT (mode, val));
9f0076e5
DE
14074 else if (code == AND)
14075 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
14076 else
14077 x = gen_rtx_fmt_ee (code, mode, before, val);
14078
14079 if (after != NULL_RTX)
14080 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
14081 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
14082
14083 emit_store_conditional (mode, cond, mem, scratch);
14084
14085 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14086 emit_unlikely_jump (x, label);
14087
14088 emit_insn (gen_isync ());
14089}
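/* Editor's note -- illustrative sketch with assumed register names: for a
   full-word atomic add the splitter above produces the classic
   load-reserved/store-conditional loop

       lwsync
   1:  lwarx   r9,0,r3        ; load and reserve
       add     r10,r9,r4      ; the requested operation
       stwcx.  r10,0,r3       ; store iff the reservation still holds
       bne-    1b             ; reservation lost: retry
       isync

   (ldarx/stdcx. in DImode); BEFORE and AFTER, when requested, receive the
   values of r9 and r10 respectively.  */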
14090
b52110d4
DE
14091/* Expand an atomic compare and swap operation. MEM is the memory on which
14092 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
14093 value to be stored. SCRATCH is a scratch GPR. */
14094
14095void
14096rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
14097 rtx scratch)
14098{
14099 enum machine_mode mode = GET_MODE (mem);
14100 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14101
a3cb4b82 14102 emit_insn (gen_lwsync ());
b52110d4
DE
14103
14104 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14105 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14106 emit_label (XEXP (label1, 0));
14107
14108 emit_load_locked (mode, retval, mem);
14109
14110 x = gen_rtx_COMPARE (CCmode, retval, oldval);
14111 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14112
14113 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14114 emit_unlikely_jump (x, label2);
14115
14116 emit_move_insn (scratch, newval);
14117 emit_store_conditional (mode, cond, mem, scratch);
14118
14119 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14120 emit_unlikely_jump (x, label1);
14121
14122 emit_insn (gen_isync ());
14123 emit_label (XEXP (label2, 0));
14124}
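/* Editor's note -- illustrative sketch with assumed register names for a
   SImode compare-and-swap:

       lwsync
   1:  lwarx   r9,0,r3        ; r9 = current value (RETVAL)
       cmpw    0,r9,r4        ; compare with OLDVAL
       bne-    0,2f           ; mismatch: give up
       mr      r10,r5         ; SCRATCH = NEWVAL
       stwcx.  r10,0,r3
       bne-    0,1b           ; reservation lost: retry
       isync
   2:

   matching the label1/label2 structure above (cmpd/ldarx/stdcx. for
   DImode).  */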
14125
14126/* Expand an atomic test and set operation. MEM is the memory on which
14127 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
14128
14129void
14130rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
14131{
14132 enum machine_mode mode = GET_MODE (mem);
14133 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14134
b52110d4
DE
14135 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14136 emit_label (XEXP (label, 0));
14137
14138 emit_load_locked (mode, retval, mem);
14139 emit_move_insn (scratch, val);
14140 emit_store_conditional (mode, cond, mem, scratch);
14141
14142 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14143 emit_unlikely_jump (x, label);
14144
14145 emit_insn (gen_isync ());
14146}
14147
9fc75b97
DE
14148void
14149rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
14150{
14151 enum machine_mode mode = GET_MODE (mem);
14152 rtx addrSI, align, wdst, shift, mask;
14153 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
14154 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
14155
14156 /* Shift amount for subword relative to aligned word. */
4b49d9ce
AP
14157 addrSI = force_reg (GET_MODE (XEXP (mem, 0)), XEXP (mem, 0));
14158 addrSI = force_reg (SImode, gen_lowpart_common (SImode, addrSI));
9fc75b97
DE
14159 shift = gen_reg_rtx (SImode);
14160 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
14161 GEN_INT (shift_mask)));
14162 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
14163
14164 /* Shift and mask old value into position within word. */
14165 oldval = convert_modes (SImode, mode, oldval, 1);
14166 oldval = expand_binop (SImode, and_optab,
14167 oldval, GEN_INT (imask), NULL_RTX,
14168 1, OPTAB_LIB_WIDEN);
14169 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
14170
14171 /* Shift and mask new value into position within word. */
14172 newval = convert_modes (SImode, mode, newval, 1);
14173 newval = expand_binop (SImode, and_optab,
14174 newval, GEN_INT (imask), NULL_RTX,
14175 1, OPTAB_LIB_WIDEN);
14176 emit_insn (gen_ashlsi3 (newval, newval, shift));
14177
14178 /* Mask for insertion. */
14179 mask = gen_reg_rtx (SImode);
14180 emit_move_insn (mask, GEN_INT (imask));
14181 emit_insn (gen_ashlsi3 (mask, mask, shift));
14182
14183 /* Address of aligned word containing subword. */
14184 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
14185 NULL_RTX, 1, OPTAB_LIB_WIDEN);
14186 mem = change_address (mem, SImode, align);
14187 set_mem_align (mem, 32);
14188 MEM_VOLATILE_P (mem) = 1;
14189
14190 wdst = gen_reg_rtx (SImode);
14191 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
14192 oldval, newval, mem));
14193
2725b75c
JJ
14194 /* Shift the result back. */
14195 emit_insn (gen_lshrsi3 (wdst, wdst, shift));
14196
9fc75b97
DE
14197 emit_move_insn (dst, gen_lowpart (mode, wdst));
14198}
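/* Editor's note -- worked example of the subword positioning above,
   assuming big-endian layout: for a HImode location at offset 2 within
   its aligned word, shift = ((2*8) & 0x10) ^ 0x10 = 0, so the halfword
   occupies bits 0..15 and imask = 0xffff masks exactly those bits of
   OLDVAL, NEWVAL and the insertion mask; for offset 0 the shift is 16
   and everything is positioned in the upper halfword instead.  */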
14199
14200void
14201rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
14202 rtx oldval, rtx newval, rtx mem,
14203 rtx scratch)
14204{
14205 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14206
a3cb4b82 14207 emit_insn (gen_lwsync ());
9fc75b97
DE
14208 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14209 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14210 emit_label (XEXP (label1, 0));
14211
14212 emit_load_locked (SImode, scratch, mem);
14213
14214 /* Mask subword within loaded value for comparison with oldval.
 14215	    Use UNSPEC_AND to avoid clobber.  */
14216 emit_insn (gen_rtx_SET (SImode, dest,
14217 gen_rtx_UNSPEC (SImode,
14218 gen_rtvec (2, scratch, mask),
14219 UNSPEC_AND)));
14220
14221 x = gen_rtx_COMPARE (CCmode, dest, oldval);
14222 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14223
14224 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14225 emit_unlikely_jump (x, label2);
14226
14227 /* Clear subword within loaded value for insertion of new value. */
14228 emit_insn (gen_rtx_SET (SImode, scratch,
14229 gen_rtx_AND (SImode,
14230 gen_rtx_NOT (SImode, mask), scratch)));
14231 emit_insn (gen_iorsi3 (scratch, scratch, newval));
14232 emit_store_conditional (SImode, cond, mem, scratch);
14233
14234 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14235 emit_unlikely_jump (x, label1);
14236
14237 emit_insn (gen_isync ());
14238 emit_label (XEXP (label2, 0));
14239}
14240
14241
b52110d4 14242 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
14243 multi-register moves. It will emit at most one instruction for
14244 each register that is accessed; that is, it won't emit li/lis pairs
14245 (or equivalent for 64-bit code). One of SRC or DST must be a hard
14246 register. */
46c07df8 14247
46c07df8 14248void
a9baceb1 14249rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 14250{
a9baceb1
GK
14251 /* The register number of the first register being moved. */
14252 int reg;
14253 /* The mode that is to be moved. */
14254 enum machine_mode mode;
14255 /* The mode that the move is being done in, and its size. */
14256 enum machine_mode reg_mode;
14257 int reg_mode_size;
14258 /* The number of registers that will be moved. */
14259 int nregs;
14260
14261 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
14262 mode = GET_MODE (dst);
c8b622ff 14263 nregs = hard_regno_nregs[reg][mode];
a9baceb1 14264 if (FP_REGNO_P (reg))
696e45ba
ME
14265 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode :
14266 ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? DFmode : SFmode);
a9baceb1
GK
14267 else if (ALTIVEC_REGNO_P (reg))
14268 reg_mode = V16QImode;
4f011e1e
JM
14269 else if (TARGET_E500_DOUBLE && mode == TFmode)
14270 reg_mode = DFmode;
a9baceb1
GK
14271 else
14272 reg_mode = word_mode;
14273 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 14274
37409796 14275 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 14276
a9baceb1
GK
14277 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
14278 {
14279 /* Move register range backwards, if we might have destructive
14280 overlap. */
14281 int i;
14282 for (i = nregs - 1; i >= 0; i--)
f676971a 14283 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
14284 simplify_gen_subreg (reg_mode, dst, mode,
14285 i * reg_mode_size),
14286 simplify_gen_subreg (reg_mode, src, mode,
14287 i * reg_mode_size)));
14288 }
46c07df8
HP
14289 else
14290 {
a9baceb1
GK
14291 int i;
14292 int j = -1;
14293 bool used_update = false;
46c07df8 14294
c1e55850 14295 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
14296 {
14297 rtx breg;
3a1f863f 14298
a9baceb1
GK
14299 if (GET_CODE (XEXP (src, 0)) == PRE_INC
14300 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
14301 {
14302 rtx delta_rtx;
a9baceb1 14303 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
14304 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
14305 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
14306 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
14307 emit_insn (TARGET_32BIT
14308 ? gen_addsi3 (breg, breg, delta_rtx)
14309 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14310 src = replace_equiv_address (src, breg);
3a1f863f 14311 }
d04b6e6e 14312 else if (! rs6000_offsettable_memref_p (src))
c1e55850 14313 {
13e2e16e 14314 rtx basereg;
c1e55850
GK
14315 basereg = gen_rtx_REG (Pmode, reg);
14316 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 14317 src = replace_equiv_address (src, basereg);
c1e55850 14318 }
3a1f863f 14319
0423421f
AM
14320 breg = XEXP (src, 0);
14321 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
14322 breg = XEXP (breg, 0);
14323
14324 /* If the base register we are using to address memory is
14325 also a destination reg, then change that register last. */
14326 if (REG_P (breg)
14327 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
14328 && REGNO (breg) < REGNO (dst) + nregs)
14329 j = REGNO (breg) - REGNO (dst);
c4ad648e 14330 }
46c07df8 14331
a9baceb1 14332 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
14333 {
14334 rtx breg;
14335
a9baceb1
GK
14336 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
14337 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
14338 {
14339 rtx delta_rtx;
a9baceb1 14340 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
14341 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
14342 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
14343 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
14344
14345 /* We have to update the breg before doing the store.
14346 Use store with update, if available. */
14347
14348 if (TARGET_UPDATE)
14349 {
a9baceb1 14350 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
14351 emit_insn (TARGET_32BIT
14352 ? (TARGET_POWERPC64
14353 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
14354 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
14355 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 14356 used_update = true;
3a1f863f
DE
14357 }
14358 else
a9baceb1
GK
14359 emit_insn (TARGET_32BIT
14360 ? gen_addsi3 (breg, breg, delta_rtx)
14361 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14362 dst = replace_equiv_address (dst, breg);
3a1f863f 14363 }
37409796 14364 else
d04b6e6e 14365 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
14366 }
14367
46c07df8 14368 for (i = 0; i < nregs; i++)
f676971a 14369 {
3a1f863f
DE
14370 /* Calculate index to next subword. */
14371 ++j;
f676971a 14372 if (j == nregs)
3a1f863f 14373 j = 0;
46c07df8 14374
112cdef5 14375	  /* If the compiler has already emitted the move of the first word by
a9baceb1 14376	     a store with update, there is nothing more to do.  */
3a1f863f 14377 if (j == 0 && used_update)
a9baceb1 14378 continue;
f676971a 14379
a9baceb1
GK
14380 emit_insn (gen_rtx_SET (VOIDmode,
14381 simplify_gen_subreg (reg_mode, dst, mode,
14382 j * reg_mode_size),
14383 simplify_gen_subreg (reg_mode, src, mode,
14384 j * reg_mode_size)));
3a1f863f 14385 }
46c07df8
HP
14386 }
14387}
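/* Editor's note -- worked example: moving a 16-byte value between
   overlapping GPR ranges in 32-bit mode, say r6..r9 = r5..r8, has
   REGNO (src) < REGNO (dst), so the loop above runs backwards and emits
   r9 = r8, r8 = r7, r7 = r6, r6 = r5, which would clobber the source
   if done in ascending order.  */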
14388
12a4e8c5 14389\f
a4f6c312
SS
14390/* This page contains routines that are used to determine what the
14391 function prologue and epilogue code will do and write them out. */
9878760c 14392
a4f6c312
SS
14393/* Return the first fixed-point register that is required to be
14394 saved. 32 if none. */
9878760c
RK
14395
14396int
863d938c 14397first_reg_to_save (void)
9878760c
RK
14398{
14399 int first_reg;
14400
14401 /* Find lowest numbered live register. */
14402 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 14403 if (df_regs_ever_live_p (first_reg)
a38d360d 14404 && (! call_used_regs[first_reg]
1db02437 14405 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14406 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
14407 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
14408 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
14409 break;
14410
ee890fe2 14411#if TARGET_MACHO
93638d7a 14412 if (flag_pic
e3b5732b 14413 && crtl->uses_pic_offset_table
93638d7a 14414 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 14415 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
14416#endif
14417
9878760c
RK
14418 return first_reg;
14419}
14420
14421/* Similar, for FP regs. */
14422
14423int
863d938c 14424first_fp_reg_to_save (void)
9878760c
RK
14425{
14426 int first_reg;
14427
14428 /* Find lowest numbered live register. */
14429 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 14430 if (df_regs_ever_live_p (first_reg))
9878760c
RK
14431 break;
14432
14433 return first_reg;
14434}
00b960c7
AH
14435
14436/* Similar, for AltiVec regs. */
14437
14438static int
863d938c 14439first_altivec_reg_to_save (void)
00b960c7
AH
14440{
14441 int i;
14442
14443 /* Stack frame remains as is unless we are in AltiVec ABI. */
14444 if (! TARGET_ALTIVEC_ABI)
14445 return LAST_ALTIVEC_REGNO + 1;
14446
22fa69da 14447 /* On Darwin, the unwind routines are compiled without
982afe02 14448 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14449 altivec registers when necessary. */
e3b5732b 14450 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14451 && ! TARGET_ALTIVEC)
14452 return FIRST_ALTIVEC_REGNO + 20;
14453
00b960c7
AH
14454 /* Find lowest numbered live register. */
14455 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14456 if (df_regs_ever_live_p (i))
00b960c7
AH
14457 break;
14458
14459 return i;
14460}
14461
14462/* Return a 32-bit mask of the AltiVec registers we need to set in
14463 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
14464 the 32-bit word is 0. */
14465
14466static unsigned int
863d938c 14467compute_vrsave_mask (void)
00b960c7
AH
14468{
14469 unsigned int i, mask = 0;
14470
22fa69da 14471 /* On Darwin, the unwind routines are compiled without
982afe02 14472 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14473 call-saved altivec registers when necessary. */
e3b5732b 14474 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14475 && ! TARGET_ALTIVEC)
14476 mask |= 0xFFF;
14477
00b960c7
AH
14478 /* First, find out if we use _any_ altivec registers. */
14479 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14480 if (df_regs_ever_live_p (i))
00b960c7
AH
14481 mask |= ALTIVEC_REG_BIT (i);
14482
14483 if (mask == 0)
14484 return mask;
14485
00b960c7
AH
14486 /* Next, remove the argument registers from the set. These must
14487 be in the VRSAVE mask set by the caller, so we don't need to add
14488 them in again. More importantly, the mask we compute here is
14489 used to generate CLOBBERs in the set_vrsave insn, and we do not
14490 wish the argument registers to die. */
38173d38 14491 for (i = crtl->args.info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
14492 mask &= ~ALTIVEC_REG_BIT (i);
14493
14494 /* Similarly, remove the return value from the set. */
14495 {
14496 bool yes = false;
14497 diddle_return_value (is_altivec_return_reg, &yes);
14498 if (yes)
14499 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
14500 }
14501
14502 return mask;
14503}
14504
d62294f5 14505/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
14506 size of prologues/epilogues by calling our own save/restore-the-world
14507 routines. */
d62294f5
FJ
14508
14509static void
f57fe068
AM
14510compute_save_world_info (rs6000_stack_t *info_ptr)
14511{
14512 info_ptr->world_save_p = 1;
14513 info_ptr->world_save_p
14514 = (WORLD_SAVE_P (info_ptr)
14515 && DEFAULT_ABI == ABI_DARWIN
e3b5732b 14516 && ! (cfun->calls_setjmp && flag_exceptions)
f57fe068
AM
14517 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
14518 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
14519 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
14520 && info_ptr->cr_save_p);
f676971a 14521
d62294f5
FJ
14522 /* This will not work in conjunction with sibcalls. Make sure there
14523 are none. (This check is expensive, but seldom executed.) */
f57fe068 14524 if (WORLD_SAVE_P (info_ptr))
f676971a 14525 {
d62294f5
FJ
14526 rtx insn;
14527 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
14528 if ( GET_CODE (insn) == CALL_INSN
14529 && SIBLING_CALL_P (insn))
14530 {
14531 info_ptr->world_save_p = 0;
14532 break;
14533 }
d62294f5 14534 }
f676971a 14535
f57fe068 14536 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
14537 {
14538 /* Even if we're not touching VRsave, make sure there's room on the
14539 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 14540 will attempt to save it. */
d62294f5
FJ
14541 info_ptr->vrsave_size = 4;
14542
298ac1dd
AP
14543 /* If we are going to save the world, we need to save the link register too. */
14544 info_ptr->lr_save_p = 1;
14545
d62294f5
FJ
14546 /* "Save" the VRsave register too if we're saving the world. */
14547 if (info_ptr->vrsave_mask == 0)
c4ad648e 14548 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
14549
14550 /* Because the Darwin register save/restore routines only handle
c4ad648e 14551 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 14552 check. */
37409796
NS
14553 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14554 && (info_ptr->first_altivec_reg_save
14555 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 14556 }
f676971a 14557 return;
d62294f5
FJ
14558}
14559
14560
00b960c7 14561static void
a2369ed3 14562is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
14563{
14564 bool *yes = (bool *) xyes;
14565 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14566 *yes = true;
14567}
14568
4697a36c
MM
14569\f
14570/* Calculate the stack information for the current function. This is
14571 complicated by having two separate calling sequences, the AIX calling
14572 sequence and the V.4 calling sequence.
14573
592696dd 14574 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 14575 32-bit 64-bit
4697a36c 14576 SP----> +---------------------------------------+
a260abc9 14577 | back chain to caller | 0 0
4697a36c 14578 +---------------------------------------+
a260abc9 14579 | saved CR | 4 8 (8-11)
4697a36c 14580 +---------------------------------------+
a260abc9 14581 | saved LR | 8 16
4697a36c 14582 +---------------------------------------+
a260abc9 14583 | reserved for compilers | 12 24
4697a36c 14584 +---------------------------------------+
a260abc9 14585 | reserved for binders | 16 32
4697a36c 14586 +---------------------------------------+
a260abc9 14587 | saved TOC pointer | 20 40
4697a36c 14588 +---------------------------------------+
a260abc9 14589 | Parameter save area (P) | 24 48
4697a36c 14590 +---------------------------------------+
a260abc9 14591 | Alloca space (A) | 24+P etc.
802a0058 14592 +---------------------------------------+
a7df97e6 14593 | Local variable space (L) | 24+P+A
4697a36c 14594 +---------------------------------------+
a7df97e6 14595 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 14596 +---------------------------------------+
00b960c7
AH
14597 | Save area for AltiVec registers (W) | 24+P+A+L+X
14598 +---------------------------------------+
14599 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
14600 +---------------------------------------+
14601 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 14602 +---------------------------------------+
00b960c7
AH
14603 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
14604 +---------------------------------------+
14605 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
4697a36c
MM
14606 +---------------------------------------+
14607 old SP->| back chain to caller's caller |
14608 +---------------------------------------+
14609
5376a30c
KR
14610 The required alignment for AIX configurations is two words (i.e., 8
14611 or 16 bytes).
14612
14613
4697a36c
MM
14614 V.4 stack frames look like:
14615
14616 SP----> +---------------------------------------+
14617 | back chain to caller | 0
14618 +---------------------------------------+
5eb387b8 14619 | caller's saved LR | 4
4697a36c
MM
14620 +---------------------------------------+
14621 | Parameter save area (P) | 8
14622 +---------------------------------------+
a7df97e6 14623 | Alloca space (A) | 8+P
f676971a 14624 +---------------------------------------+
a7df97e6 14625 | Varargs save area (V) | 8+P+A
f676971a 14626 +---------------------------------------+
a7df97e6 14627 | Local variable space (L) | 8+P+A+V
f676971a 14628 +---------------------------------------+
a7df97e6 14629 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 14630 +---------------------------------------+
00b960c7
AH
14631 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
14632 +---------------------------------------+
14633 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
14634 +---------------------------------------+
14635 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
14636 +---------------------------------------+
c4ad648e
AM
14637 | SPE: area for 64-bit GP registers |
14638 +---------------------------------------+
14639 | SPE alignment padding |
14640 +---------------------------------------+
00b960c7 14641 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 14642 +---------------------------------------+
00b960c7 14643 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 14644 +---------------------------------------+
00b960c7 14645 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
14646 +---------------------------------------+
14647 old SP->| back chain to caller's caller |
14648 +---------------------------------------+
b6c9286a 14649
5376a30c
KR
14650 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
14651 given. (But note below and in sysv4.h that we require only 8 and
 14652	 may round up the size of our stack frame anyway.  The historical
14653 reason is early versions of powerpc-linux which didn't properly
14654 align the stack at program startup. A happy side-effect is that
14655 -mno-eabi libraries can be used with -meabi programs.)
14656
50d440bc 14657 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
14658 the stack alignment requirements may differ. If -mno-eabi is not
14659 given, the required stack alignment is 8 bytes; if -mno-eabi is
14660 given, the required alignment is 16 bytes. (But see V.4 comment
14661 above.) */
4697a36c 14662
61b2fbe7
MM
14663#ifndef ABI_STACK_BOUNDARY
14664#define ABI_STACK_BOUNDARY STACK_BOUNDARY
14665#endif
14666
d1d0c603 14667static rs6000_stack_t *
863d938c 14668rs6000_stack_info (void)
4697a36c 14669{
022123e6 14670 static rs6000_stack_t info;
4697a36c 14671 rs6000_stack_t *info_ptr = &info;
327e5343 14672 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 14673 int ehrd_size;
64045029 14674 int save_align;
8070c91a 14675 int first_gp;
44688022 14676 HOST_WIDE_INT non_fixed_size;
4697a36c 14677
022123e6 14678 memset (&info, 0, sizeof (info));
4697a36c 14679
c19de7aa
AH
14680 if (TARGET_SPE)
14681 {
14682 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 14683 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
14684 cfun->machine->insn_chain_scanned_p
14685 = spe_func_has_64bit_regs_p () + 1;
14686 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
14687 }
14688
a4f6c312 14689 /* Select which calling sequence. */
178274da 14690 info_ptr->abi = DEFAULT_ABI;
9878760c 14691
a4f6c312 14692 /* Calculate which registers need to be saved & save area size. */
4697a36c 14693 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 14694 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
14695 even if it currently looks like we won't. Reload may need it to
14696 get at a constant; if so, it will have already created a constant
14697 pool entry for it. */
2bfcf297 14698 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
14699 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14700 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
e3b5732b 14701 && crtl->uses_const_pool
1db02437 14702 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 14703 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 14704 else
8070c91a
DJ
14705 first_gp = info_ptr->first_gp_reg_save;
14706
14707 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 14708
a3170dc6
AH
14709 /* For the SPE, we have an additional upper 32-bits on each GPR.
14710 Ideally we should save the entire 64-bits only when the upper
14711 half is used in SIMD instructions. Since we only record
14712 registers live (not the size they are used in), this proves
14713 difficult because we'd have to traverse the instruction chain at
14714 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
14715 so we opt to save the GPRs in 64-bits always if but one register
14716 gets used in 64-bits. Otherwise, all the registers in the frame
14717 get saved in 32-bits.
a3170dc6 14718
c19de7aa 14719 So... since when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 14720 traditional GP save area will be empty. */
c19de7aa 14721 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14722 info_ptr->gp_size = 0;
14723
4697a36c
MM
14724 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14725 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14726
00b960c7
AH
14727 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14728 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14729 - info_ptr->first_altivec_reg_save);
14730
592696dd 14731 /* Does this function call anything? */
71f123ca
FS
14732 info_ptr->calls_p = (! current_function_is_leaf
14733 || cfun->machine->ra_needs_full_frame);
b6c9286a 14734
a4f6c312 14735 /* Determine if we need to save the link register. */
022123e6 14736 if ((DEFAULT_ABI == ABI_AIX
e3b5732b 14737 && crtl->profile
022123e6 14738 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
14739#ifdef TARGET_RELOCATABLE
14740 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14741#endif
14742 || (info_ptr->first_fp_reg_save != 64
14743 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
e3b5732b 14744 || (DEFAULT_ABI == ABI_V4 && cfun->calls_alloca)
022123e6
AM
14745 || info_ptr->calls_p
14746 || rs6000_ra_ever_killed ())
4697a36c
MM
14747 {
14748 info_ptr->lr_save_p = 1;
1de43f85 14749 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
14750 }
14751
9ebbca7d 14752 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
14753 if (df_regs_ever_live_p (CR2_REGNO)
14754 || df_regs_ever_live_p (CR3_REGNO)
14755 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
14756 {
14757 info_ptr->cr_save_p = 1;
178274da 14758 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
14759 info_ptr->cr_size = reg_size;
14760 }
14761
83720594
RH
14762 /* If the current function calls __builtin_eh_return, then we need
14763 to allocate stack space for registers that will hold data for
14764 the exception handler. */
e3b5732b 14765 if (crtl->calls_eh_return)
83720594
RH
14766 {
14767 unsigned int i;
14768 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14769 continue;
a3170dc6
AH
14770
14771 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14772 ehrd_size = i * (TARGET_SPE_ABI
14773 && info_ptr->spe_64bit_regs_used != 0
14774 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14775 }
14776 else
14777 ehrd_size = 0;
14778
592696dd 14779 /* Determine various sizes. */
4697a36c
MM
14780 info_ptr->reg_size = reg_size;
14781 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14782 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
38173d38 14783 info_ptr->parm_size = RS6000_ALIGN (crtl->outgoing_args_size,
03e007d7 14784 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14785 if (FRAME_GROWS_DOWNWARD)
14786 info_ptr->vars_size
5b667039
JJ
14787 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14788 + info_ptr->parm_size,
7d5175e1 14789 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14790 - (info_ptr->fixed_size + info_ptr->vars_size
14791 + info_ptr->parm_size);
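 /* Illustrative arithmetic only, with hypothetical sizes and a
 16-byte ABI stack boundary: fixed_size = 64, vars_size = 20 and
 parm_size = 40 sum to 124, which RS6000_ALIGN rounds up to 128,
 so 4 bytes of padding are folded into vars_size here. */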
00b960c7 14792
c19de7aa 14793 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 14794 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
14795 else
14796 info_ptr->spe_gp_size = 0;
14797
4d774ff8
HP
14798 if (TARGET_ALTIVEC_ABI)
14799 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14800 else
4d774ff8
HP
14801 info_ptr->vrsave_mask = 0;
14802
14803 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14804 info_ptr->vrsave_size = 4;
14805 else
14806 info_ptr->vrsave_size = 0;
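 /* VRSAVE is a 32-bit special purpose register, hence the fixed
 4-byte slot whenever it needs saving. */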
b6c9286a 14807
d62294f5
FJ
14808 compute_save_world_info (info_ptr);
14809
592696dd 14810 /* Calculate the offsets. */
178274da 14811 switch (DEFAULT_ABI)
4697a36c 14812 {
b6c9286a 14813 case ABI_NONE:
24d304eb 14814 default:
37409796 14815 gcc_unreachable ();
b6c9286a
MM
14816
14817 case ABI_AIX:
ee890fe2 14818 case ABI_DARWIN:
b6c9286a
MM
14819 info_ptr->fp_save_offset = - info_ptr->fp_size;
14820 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14821
14822 if (TARGET_ALTIVEC_ABI)
14823 {
14824 info_ptr->vrsave_save_offset
14825 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14826
982afe02 14827 /* Align stack so vector save area is on a quadword boundary.
9278121c 14828 The padding goes above the vectors. */
00b960c7
AH
14829 if (info_ptr->altivec_size != 0)
14830 info_ptr->altivec_padding_size
9278121c 14831 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14832 else
14833 info_ptr->altivec_padding_size = 0;
14834
14835 info_ptr->altivec_save_offset
14836 = info_ptr->vrsave_save_offset
14837 - info_ptr->altivec_padding_size
14838 - info_ptr->altivec_size;
9278121c
GK
14839 gcc_assert (info_ptr->altivec_size == 0
14840 || info_ptr->altivec_save_offset % 16 == 0);
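 /* Worked example with a hypothetical offset: if vrsave_save_offset
 were -228, then -228 & 0xF = 12 bytes of padding go above the
 vectors, and with altivec_size a multiple of 16 the resulting
 altivec_save_offset of -240 - altivec_size satisfies the
 assertion above. */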
00b960c7
AH
14841
14842 /* Adjust for AltiVec case. */
14843 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14844 }
14845 else
14846 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14847 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14848 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
14849 break;
14850
14851 case ABI_V4:
b6c9286a
MM
14852 info_ptr->fp_save_offset = - info_ptr->fp_size;
14853 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14854 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14855
c19de7aa 14856 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14857 {
14858 /* Align stack so SPE GPR save area is aligned on a
14859 double-word boundary. */
f78c3290 14860 if (info_ptr->spe_gp_size != 0 && info_ptr->cr_save_offset != 0)
c4ad648e
AM
14861 info_ptr->spe_padding_size
14862 = 8 - (-info_ptr->cr_save_offset % 8);
14863 else
14864 info_ptr->spe_padding_size = 0;
14865
14866 info_ptr->spe_gp_save_offset
14867 = info_ptr->cr_save_offset
14868 - info_ptr->spe_padding_size
14869 - info_ptr->spe_gp_size;
14870
14871 /* Adjust for SPE case. */
022123e6 14872 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14873 }
a3170dc6 14874 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14875 {
14876 info_ptr->vrsave_save_offset
14877 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14878
14879 /* Align stack so vector save area is on a quadword boundary. */
14880 if (info_ptr->altivec_size != 0)
14881 info_ptr->altivec_padding_size
14882 = 16 - (-info_ptr->vrsave_save_offset % 16);
14883 else
14884 info_ptr->altivec_padding_size = 0;
14885
14886 info_ptr->altivec_save_offset
14887 = info_ptr->vrsave_save_offset
14888 - info_ptr->altivec_padding_size
14889 - info_ptr->altivec_size;
14890
14891 /* Adjust for AltiVec case. */
022123e6 14892 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14893 }
14894 else
022123e6
AM
14895 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14896 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14897 info_ptr->lr_save_offset = reg_size;
14898 break;
4697a36c
MM
14899 }
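 /* A minimal worked example of the AIX/Darwin branch above, using
 hypothetical inputs: with reg_size = 4, first_fp_reg_save = 62 and
 first_gp_reg_save = 30 we get fp_size = 16 and gp_size = 8, hence
 fp_save_offset = -16, gp_save_offset = -24, and (with no AltiVec
 registers and no EH data) ehrd_offset = -24, cr_save_offset = 4,
 lr_save_offset = 8. */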
14900
64045029 14901 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14902 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14903 + info_ptr->gp_size
14904 + info_ptr->altivec_size
14905 + info_ptr->altivec_padding_size
a3170dc6
AH
14906 + info_ptr->spe_gp_size
14907 + info_ptr->spe_padding_size
00b960c7
AH
14908 + ehrd_size
14909 + info_ptr->cr_size
022123e6 14910 + info_ptr->vrsave_size,
64045029 14911 save_align);
00b960c7 14912
44688022 14913 non_fixed_size = (info_ptr->vars_size
ff381587 14914 + info_ptr->parm_size
5b667039 14915 + info_ptr->save_size);
ff381587 14916
44688022
AM
14917 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14918 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14919
14920 /* Determine if we need to allocate any stack frame:
14921
a4f6c312
SS
14922 For AIX we need to push the stack if a frame pointer is needed
14923 (because the stack might be dynamically adjusted), if we are
14924 debugging, if we make calls, or if the sum of fp_save, gp_save,
14925 and local variables are more than the space needed to save all
14926 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14927 + 18*8 = 288 (GPR13 reserved).
ff381587 14928
a4f6c312
SS
14929 For V.4 we don't have the stack cushion that AIX uses, but assume
14930 that the debugger can handle stackless frames. */
ff381587
MM
14931
14932 if (info_ptr->calls_p)
14933 info_ptr->push_p = 1;
14934
178274da 14935 else if (DEFAULT_ABI == ABI_V4)
44688022 14936 info_ptr->push_p = non_fixed_size != 0;
ff381587 14937
178274da
AM
14938 else if (frame_pointer_needed)
14939 info_ptr->push_p = 1;
14940
14941 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14942 info_ptr->push_p = 1;
14943
ff381587 14944 else
44688022 14945 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
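 /* The 220/288 cushion is just the size of the full register save
 areas: 32-bit, 18 FPRs (f14-f31) * 8 + 19 GPRs (r13-r31) * 4
 = 144 + 76 = 220; 64-bit, 18 FPRs * 8 + 18 GPRs (r14-r31, r13
 being reserved) * 8 = 288. */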
ff381587 14946
a4f6c312 14947 /* Zero offsets if we're not saving those registers. */
8dda1a21 14948 if (info_ptr->fp_size == 0)
4697a36c
MM
14949 info_ptr->fp_save_offset = 0;
14950
8dda1a21 14951 if (info_ptr->gp_size == 0)
4697a36c
MM
14952 info_ptr->gp_save_offset = 0;
14953
00b960c7
AH
14954 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14955 info_ptr->altivec_save_offset = 0;
14956
14957 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14958 info_ptr->vrsave_save_offset = 0;
14959
c19de7aa
AH
14960 if (! TARGET_SPE_ABI
14961 || info_ptr->spe_64bit_regs_used == 0
14962 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
14963 info_ptr->spe_gp_save_offset = 0;
14964
c81fc13e 14965 if (! info_ptr->lr_save_p)
4697a36c
MM
14966 info_ptr->lr_save_offset = 0;
14967
c81fc13e 14968 if (! info_ptr->cr_save_p)
4697a36c
MM
14969 info_ptr->cr_save_offset = 0;
14970
14971 return info_ptr;
14972}
14973
c19de7aa
AH
14974/* Return true if the current function uses any GPRs in 64-bit SIMD
14975 mode. */
14976
14977static bool
863d938c 14978spe_func_has_64bit_regs_p (void)
c19de7aa
AH
14979{
14980 rtx insns, insn;
14981
14982 /* Functions that save and restore all the call-saved registers will
14983 need to save/restore the registers in 64-bits. */
e3b5732b
JH
14984 if (crtl->calls_eh_return
14985 || cfun->calls_setjmp
14986 || crtl->has_nonlocal_goto)
c19de7aa
AH
14987 return true;
14988
14989 insns = get_insns ();
14990
14991 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
14992 {
14993 if (INSN_P (insn))
14994 {
14995 rtx i;
14996
b5a5beb9
AH
14997 /* FIXME: This should be implemented with attributes...
14998
14999 (set_attr "spe64" "true")....then,
15000 if (get_spe64(insn)) return true;
15001
15002 It's the only reliable way to do the stuff below. */
15003
c19de7aa 15004 i = PATTERN (insn);
f82f556d
AH
15005 if (GET_CODE (i) == SET)
15006 {
15007 enum machine_mode mode = GET_MODE (SET_SRC (i));
15008
15009 if (SPE_VECTOR_MODE (mode))
15010 return true;
4f011e1e 15011 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
f82f556d
AH
15012 return true;
15013 }
c19de7aa
AH
15014 }
15015 }
15016
15017 return false;
15018}
15019
d1d0c603 15020static void
a2369ed3 15021debug_stack_info (rs6000_stack_t *info)
9878760c 15022{
d330fd93 15023 const char *abi_string;
24d304eb 15024
c81fc13e 15025 if (! info)
4697a36c
MM
15026 info = rs6000_stack_info ();
15027
15028 fprintf (stderr, "\nStack information for function %s:\n",
15029 ((current_function_decl && DECL_NAME (current_function_decl))
15030 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
15031 : "<unknown>"));
15032
24d304eb
RK
15033 switch (info->abi)
15034 {
b6c9286a
MM
15035 default: abi_string = "Unknown"; break;
15036 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 15037 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 15038 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 15039 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
15040 }
15041
15042 fprintf (stderr, "\tABI = %5s\n", abi_string);
15043
00b960c7
AH
15044 if (TARGET_ALTIVEC_ABI)
15045 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
15046
a3170dc6
AH
15047 if (TARGET_SPE_ABI)
15048 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
15049
4697a36c
MM
15050 if (info->first_gp_reg_save != 32)
15051 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
15052
15053 if (info->first_fp_reg_save != 64)
15054 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 15055
00b960c7
AH
15056 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
15057 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
15058 info->first_altivec_reg_save);
15059
4697a36c
MM
15060 if (info->lr_save_p)
15061 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 15062
4697a36c
MM
15063 if (info->cr_save_p)
15064 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
15065
00b960c7
AH
15066 if (info->vrsave_mask)
15067 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
15068
4697a36c
MM
15069 if (info->push_p)
15070 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
15071
15072 if (info->calls_p)
15073 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
15074
4697a36c
MM
15075 if (info->gp_save_offset)
15076 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
15077
15078 if (info->fp_save_offset)
15079 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
15080
00b960c7
AH
15081 if (info->altivec_save_offset)
15082 fprintf (stderr, "\taltivec_save_offset = %5d\n",
15083 info->altivec_save_offset);
15084
a3170dc6
AH
15085 if (info->spe_gp_save_offset)
15086 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
15087 info->spe_gp_save_offset);
15088
00b960c7
AH
15089 if (info->vrsave_save_offset)
15090 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
15091 info->vrsave_save_offset);
15092
4697a36c
MM
15093 if (info->lr_save_offset)
15094 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
15095
15096 if (info->cr_save_offset)
15097 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
15098
15099 if (info->varargs_save_offset)
15100 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
15101
15102 if (info->total_size)
d1d0c603
JJ
15103 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
15104 info->total_size);
4697a36c 15105
4697a36c 15106 if (info->vars_size)
d1d0c603
JJ
15107 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
15108 info->vars_size);
4697a36c
MM
15109
15110 if (info->parm_size)
15111 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
15112
15113 if (info->fixed_size)
15114 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
15115
15116 if (info->gp_size)
15117 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
15118
a3170dc6
AH
15119 if (info->spe_gp_size)
15120 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
15121
4697a36c
MM
15122 if (info->fp_size)
15123 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
15124
00b960c7
AH
15125 if (info->altivec_size)
15126 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
15127
15128 if (info->vrsave_size)
15129 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
15130
15131 if (info->altivec_padding_size)
15132 fprintf (stderr, "\taltivec_padding_size= %5d\n",
15133 info->altivec_padding_size);
15134
a3170dc6
AH
15135 if (info->spe_padding_size)
15136 fprintf (stderr, "\tspe_padding_size = %5d\n",
15137 info->spe_padding_size);
15138
4697a36c
MM
15139 if (info->cr_size)
15140 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
15141
15142 if (info->save_size)
15143 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
15144
15145 if (info->reg_size != 4)
15146 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
15147
15148 fprintf (stderr, "\n");
9878760c 15149}
71f123ca
FS
15150
15151rtx
a2369ed3 15152rs6000_return_addr (int count, rtx frame)
71f123ca 15153{
a4f6c312
SS
15154 /* Currently we don't optimize very well between prolog and body
15155 code and for PIC code the code can be actually quite bad, so
15156 don't try to be too clever here. */
f1384257 15157 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
15158 {
15159 cfun->machine->ra_needs_full_frame = 1;
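 /* What the RTL built below computes, step by step: load the word
 at FRAME (the back-chain pointer stored at the base of the
 frame), then load the word RETURN_ADDRESS_OFFSET bytes into that
 frame, which is where the link register was saved. */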
8ac61af7
RK
15160
15161 return
15162 gen_rtx_MEM
15163 (Pmode,
15164 memory_address
15165 (Pmode,
15166 plus_constant (copy_to_reg
15167 (gen_rtx_MEM (Pmode,
15168 memory_address (Pmode, frame))),
15169 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
15170 }
15171
8c29550d 15172 cfun->machine->ra_need_lr = 1;
1de43f85 15173 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
15174}
15175
5e1bf043
DJ
15176/* Say whether a function is a candidate for sibcall handling or not.
15177 We do not allow indirect calls to be optimized into sibling calls.
15178 Also, we can't do it if there are any vector parameters; there's
15179 nowhere to put the VRsave code so that it works. Note that functions with
15180 vector parameters are required to have a prototype, so the argument
15181 type info must be available here. (The tail recursion case can work
15182 with vector parameters, but there's no way to distinguish it here.) */
4977bab6 15183static bool
a2369ed3 15184rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
15185{
15186 tree type;
4977bab6 15187 if (decl)
5e1bf043
DJ
15188 {
15189 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 15190 {
4977bab6 15191 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
15192 type; type = TREE_CHAIN (type))
15193 {
c15b529f 15194 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 15195 return false;
5e1bf043 15196 }
c4ad648e 15197 }
5e1bf043 15198 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
15199 || ((*targetm.binds_local_p) (decl)
15200 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 15201 {
4977bab6 15202 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
15203
15204 if (!lookup_attribute ("longcall", attr_list)
15205 || lookup_attribute ("shortcall", attr_list))
4977bab6 15206 return true;
2bcc50d0 15207 }
5e1bf043 15208 }
4977bab6 15209 return false;
5e1bf043
DJ
15210}
15211
e7e64a25
AS
15212/* Return NULL if INSN is valid within a low-overhead loop;
15213 otherwise return a string describing why doloop cannot be applied.
9419649c
DE
15214 PowerPC uses the COUNT register for branch on table instructions. */
15215
e7e64a25 15216static const char *
3101faab 15217rs6000_invalid_within_doloop (const_rtx insn)
9419649c
DE
15218{
15219 if (CALL_P (insn))
e7e64a25 15220 return "Function call in the loop.";
9419649c
DE
15221
15222 if (JUMP_P (insn)
15223 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
15224 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 15225 return "Computed branch in the loop.";
9419649c 15226
e7e64a25 15227 return NULL;
9419649c
DE
15228}
15229
71f123ca 15230static int
863d938c 15231rs6000_ra_ever_killed (void)
71f123ca
FS
15232{
15233 rtx top;
5e1bf043
DJ
15234 rtx reg;
15235 rtx insn;
71f123ca 15236
3c072c6b 15237 if (cfun->is_thunk)
71f123ca 15238 return 0;
eb0424da 15239
36f7e964
AH
15240 /* regs_ever_live has LR marked as used if any sibcalls are present,
15241 but this should not force saving and restoring in the
15242 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 15243 clobbers LR, so that is inappropriate. */
36f7e964 15244
5e1bf043
DJ
15245 /* Also, the prologue can generate a store into LR that
15246 doesn't really count, like this:
36f7e964 15247
5e1bf043
DJ
15248 move LR->R0
15249 bcl to set PIC register
15250 move LR->R31
15251 move R0->LR
36f7e964
AH
15252
15253 When we're called from the epilogue, we need to avoid counting
15254 this as a store. */
f676971a 15255
71f123ca
FS
15256 push_topmost_sequence ();
15257 top = get_insns ();
15258 pop_topmost_sequence ();
1de43f85 15259 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 15260
5e1bf043
DJ
15261 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
15262 {
15263 if (INSN_P (insn))
15264 {
022123e6
AM
15265 if (CALL_P (insn))
15266 {
15267 if (!SIBLING_CALL_P (insn))
15268 return 1;
15269 }
1de43f85 15270 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 15271 return 1;
36f7e964
AH
15272 else if (set_of (reg, insn) != NULL_RTX
15273 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
15274 return 1;
15275 }
15276 }
15277 return 0;
71f123ca 15278}
4697a36c 15279\f
9ebbca7d 15280/* Emit instructions needed to load the TOC register.
c7ca610e 15281 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 15282 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
15283
15284void
a2369ed3 15285rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 15286{
6fb5fa3c 15287 rtx dest;
1db02437 15288 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 15289
7f970b70 15290 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 15291 {
7f970b70 15292 char buf[30];
e65a3857 15293 rtx lab, tmp1, tmp2, got;
7f970b70
AM
15294
15295 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15296 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15297 if (flag_pic == 2)
15298 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
15299 else
15300 got = rs6000_got_sym ();
15301 tmp1 = tmp2 = dest;
15302 if (!fromprolog)
15303 {
15304 tmp1 = gen_reg_rtx (Pmode);
15305 tmp2 = gen_reg_rtx (Pmode);
15306 }
6fb5fa3c
DB
15307 emit_insn (gen_load_toc_v4_PIC_1 (lab));
15308 emit_move_insn (tmp1,
1de43f85 15309 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
15310 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
15311 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
15312 }
15313 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
15314 {
6fb5fa3c 15315 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 15316 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
15317 }
15318 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
15319 {
15320 char buf[30];
20b71b17
AM
15321 rtx temp0 = (fromprolog
15322 ? gen_rtx_REG (Pmode, 0)
15323 : gen_reg_rtx (Pmode));
20b71b17 15324
20b71b17
AM
15325 if (fromprolog)
15326 {
ccbca5e4 15327 rtx symF, symL;
38c1f2d7 15328
20b71b17
AM
15329 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15330 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 15331
20b71b17
AM
15332 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
15333 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15334
6fb5fa3c
DB
15335 emit_insn (gen_load_toc_v4_PIC_1 (symF));
15336 emit_move_insn (dest,
1de43f85 15337 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 15338 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
15339 }
15340 else
20b71b17
AM
15341 {
15342 rtx tocsym;
20b71b17
AM
15343
15344 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
15345 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
15346 emit_move_insn (dest,
1de43f85 15347 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 15348 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 15349 }
6fb5fa3c 15350 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 15351 }
20b71b17
AM
15352 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
15353 {
15354 /* This is for AIX code running in non-PIC ELF32. */
15355 char buf[30];
15356 rtx realsym;
15357 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15358 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15359
6fb5fa3c
DB
15360 emit_insn (gen_elf_high (dest, realsym));
15361 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 15362 }
37409796 15363 else
9ebbca7d 15364 {
37409796 15365 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 15366
9ebbca7d 15367 if (TARGET_32BIT)
6fb5fa3c 15368 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 15369 else
6fb5fa3c 15370 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
15371 }
15372}
15373
d1d0c603
JJ
15374/* Emit instructions to restore the link register after determining where
15375 its value has been stored. */
15376
15377void
15378rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
15379{
15380 rs6000_stack_t *info = rs6000_stack_info ();
15381 rtx operands[2];
15382
15383 operands[0] = source;
15384 operands[1] = scratch;
15385
15386 if (info->lr_save_p)
15387 {
15388 rtx frame_rtx = stack_pointer_rtx;
15389 HOST_WIDE_INT sp_offset = 0;
15390 rtx tmp;
15391
15392 if (frame_pointer_needed
e3b5732b 15393 || cfun->calls_alloca
d1d0c603
JJ
15394 || info->total_size > 32767)
15395 {
0be76840 15396 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 15397 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
15398 frame_rtx = operands[1];
15399 }
15400 else if (info->push_p)
15401 sp_offset = info->total_size;
15402
15403 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 15404 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
15405 emit_move_insn (tmp, operands[0]);
15406 }
15407 else
1de43f85 15408 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
15409}
15410
4862826d 15411static GTY(()) alias_set_type set = -1;
f103e34d 15412
4862826d 15413alias_set_type
863d938c 15414get_TOC_alias_set (void)
9ebbca7d 15415{
f103e34d
GK
15416 if (set == -1)
15417 set = new_alias_set ();
15418 return set;
f676971a 15419}
9ebbca7d 15420
c1207243 15421/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
15422 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
15423 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 15424#if TARGET_ELF
3c9eb5f4 15425static int
f676971a 15426uses_TOC (void)
9ebbca7d 15427{
c4501e62 15428 rtx insn;
38c1f2d7 15429
c4501e62
JJ
15430 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
15431 if (INSN_P (insn))
15432 {
15433 rtx pat = PATTERN (insn);
15434 int i;
9ebbca7d 15435
f676971a 15436 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
15437 for (i = 0; i < XVECLEN (pat, 0); i++)
15438 {
15439 rtx sub = XVECEXP (pat, 0, i);
15440 if (GET_CODE (sub) == USE)
15441 {
15442 sub = XEXP (sub, 0);
15443 if (GET_CODE (sub) == UNSPEC
15444 && XINT (sub, 1) == UNSPEC_TOC)
15445 return 1;
15446 }
15447 }
15448 }
15449 return 0;
9ebbca7d 15450}
c954844a 15451#endif
38c1f2d7 15452
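/* Return a TOC-relative reference to SYMBOL, of the form
   (plus (reg TOC_REGISTER) (const (unspec [SYMBOL] UNSPEC_TOCREL))). */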
9ebbca7d 15453rtx
f676971a 15454create_TOC_reference (rtx symbol)
9ebbca7d 15455{
b3a13419 15456 if (!can_create_pseudo_p ())
6fb5fa3c 15457 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 15458 return gen_rtx_PLUS (Pmode,
a8a05998 15459 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a 15460 gen_rtx_CONST (Pmode,
2e4316da 15461 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, symbol), UNSPEC_TOCREL)));
9ebbca7d 15462}
38c1f2d7 15463
fc4767bb
JJ
15464/* If _Unwind_* has been called from within the same module,
15465 the TOC register is not guaranteed to have been saved to 40(1) on function
15466 entry. Save it there in that case. */
c7ca610e 15467
9ebbca7d 15468void
863d938c 15469rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
15470{
15471 rtx mem;
15472 rtx stack_top = gen_reg_rtx (Pmode);
15473 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
15474 rtx opcode = gen_reg_rtx (SImode);
15475 rtx tocompare = gen_reg_rtx (SImode);
15476 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 15477
8308679f 15478 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
15479 emit_move_insn (stack_top, mem);
15480
8308679f
DE
15481 mem = gen_frame_mem (Pmode,
15482 gen_rtx_PLUS (Pmode, stack_top,
15483 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 15484 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
15485 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
15486 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 15487 : 0xE8410028, SImode));
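 /* Those two constants are the encodings of "lwz r2,20(r1)" (32-bit)
 and "ld r2,40(r1)" (64-bit), the conventional TOC restore after a
 call; the compare below checks whether that instruction sits at
 the saved return address. */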
9ebbca7d 15488
fc4767bb 15489 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 15490 SImode, NULL_RTX, NULL_RTX,
fc4767bb 15491 no_toc_save_needed);
9ebbca7d 15492
8308679f
DE
15493 mem = gen_frame_mem (Pmode,
15494 gen_rtx_PLUS (Pmode, stack_top,
15495 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
15496 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
15497 emit_label (no_toc_save_needed);
9ebbca7d 15498}
38c1f2d7 15499\f
0be76840
DE
15500/* This ties together stack memory (MEM with an alias set of frame_alias_set)
15501 and the change to the stack pointer. */
ba4828e0 15502
9ebbca7d 15503static void
863d938c 15504rs6000_emit_stack_tie (void)
9ebbca7d 15505{
0be76840
DE
15506 rtx mem = gen_frame_mem (BLKmode,
15507 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 15508
9ebbca7d
GK
15509 emit_insn (gen_stack_tie (mem));
15510}
38c1f2d7 15511
9ebbca7d
GK
15512/* Emit the correct code for allocating stack space, as insns.
15513 If COPY_R12, make sure a copy of the old frame is left in r12.
f78c3290
NF
15514 If COPY_R11, make sure a copy of the old frame is left in r11,
15515 in preference to r12 if COPY_R12.
9ebbca7d
GK
15516 The generated code may use hard register 0 as a temporary. */
15517
15518static void
f78c3290 15519rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12, int copy_r11)
38c1f2d7 15520{
9ebbca7d
GK
15521 rtx insn;
15522 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15523 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1 15524 rtx todec = gen_int_mode (-size, Pmode);
d4bacef0 15525 rtx par, set, mem;
61168ff1
RS
15526
15527 if (INTVAL (todec) != -size)
15528 {
d4ee4d25 15529 warning (0, "stack frame too large");
61168ff1
RS
15530 emit_insn (gen_trap ());
15531 return;
15532 }
a157febd 15533
e3b5732b 15534 if (crtl->limit_stack)
a157febd
GK
15535 {
15536 if (REG_P (stack_limit_rtx)
f676971a 15537 && REGNO (stack_limit_rtx) > 1
a157febd
GK
15538 && REGNO (stack_limit_rtx) <= 31)
15539 {
5b71a4e7 15540 emit_insn (TARGET_32BIT
9ebbca7d
GK
15541 ? gen_addsi3 (tmp_reg,
15542 stack_limit_rtx,
15543 GEN_INT (size))
15544 : gen_adddi3 (tmp_reg,
15545 stack_limit_rtx,
15546 GEN_INT (size)));
5b71a4e7 15547
9ebbca7d
GK
15548 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15549 const0_rtx));
a157febd
GK
15550 }
15551 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 15552 && TARGET_32BIT
f607bc57 15553 && DEFAULT_ABI == ABI_V4)
a157febd 15554 {
9ebbca7d 15555 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
15556 gen_rtx_PLUS (Pmode,
15557 stack_limit_rtx,
9ebbca7d 15558 GEN_INT (size)));
5b71a4e7 15559
9ebbca7d
GK
15560 emit_insn (gen_elf_high (tmp_reg, toload));
15561 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
15562 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15563 const0_rtx));
a157febd
GK
15564 }
15565 else
d4ee4d25 15566 warning (0, "stack limit expression is not supported");
a157febd
GK
15567 }
15568
d4bacef0 15569 if (copy_r12 || copy_r11)
f78c3290
NF
15570 emit_move_insn (copy_r11
15571 ? gen_rtx_REG (Pmode, 11)
15572 : gen_rtx_REG (Pmode, 12),
15573 stack_reg);
9ebbca7d 15574
d4bacef0 15575 if (size > 32767)
38c1f2d7 15576 {
d4bacef0
BE
15577 /* Need a note here so that try_split doesn't get confused. */
15578 if (get_last_insn () == NULL_RTX)
15579 emit_note (NOTE_INSN_DELETED);
15580 insn = emit_move_insn (tmp_reg, todec);
15581 try_split (PATTERN (insn), insn, 0);
15582 todec = tmp_reg;
9ebbca7d 15583 }
d4bacef0
BE
15584
15585 insn = emit_insn (TARGET_32BIT
15586 ? gen_movsi_update_stack (stack_reg, stack_reg,
15587 todec, stack_reg)
15588 : gen_movdi_di_update_stack (stack_reg, stack_reg,
15589 todec, stack_reg));
15590 /* Since we didn't use gen_frame_mem to generate the MEM, grab
15591 it now and set the alias set/attributes. The above gen_*_update
15592 calls will generate a PARALLEL with the MEM set being the first
15593 operation. */
15594 par = PATTERN (insn);
15595 gcc_assert (GET_CODE (par) == PARALLEL);
15596 set = XVECEXP (par, 0, 0);
15597 gcc_assert (GET_CODE (set) == SET);
15598 mem = SET_DEST (set);
15599 gcc_assert (MEM_P (mem));
15600 MEM_NOTRAP_P (mem) = 1;
15601 set_mem_alias_set (mem, get_frame_alias_set ());
f676971a 15602
9ebbca7d 15603 RTX_FRAME_RELATED_P (insn) = 1;
bbbbb16a
ILT
15604 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
15605 gen_rtx_SET (VOIDmode, stack_reg,
15606 gen_rtx_PLUS (Pmode, stack_reg,
15607 GEN_INT (-size))));
9ebbca7d
GK
15608}
15609
a4f6c312
SS
15610/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
15611 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
15612 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
15613 deduce these equivalences by itself so it wasn't necessary to hold
15614 its hand so much. */
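/* For instance (hypothetical operands): if INSN is
   (set (mem (plus (reg 12) (const_int 8))) (reg 30)) and we are called
   with REG = r12 and VAL = the frame size, the attached note becomes
   (set (mem (plus (reg 1) (const_int VAL + 8))) (reg 30)) once
   simplify_rtx has folded the nested PLUS, which is the form the
   dwarf2 frame code expects. */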
9ebbca7d
GK
15615
15616static void
f676971a 15617rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 15618 rtx reg2, rtx rreg)
9ebbca7d
GK
15619{
15620 rtx real, temp;
15621
e56c4463
JL
15622 /* copy_rtx will not make unique copies of registers, so we need to
15623 ensure we don't have unwanted sharing here. */
15624 if (reg == reg2)
15625 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15626
15627 if (reg == rreg)
15628 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15629
9ebbca7d
GK
15630 real = copy_rtx (PATTERN (insn));
15631
89e7058f
AH
15632 if (reg2 != NULL_RTX)
15633 real = replace_rtx (real, reg2, rreg);
f676971a
EC
15634
15635 real = replace_rtx (real, reg,
9ebbca7d
GK
15636 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
15637 STACK_POINTER_REGNUM),
15638 GEN_INT (val)));
f676971a 15639
9ebbca7d
GK
15640 /* We expect that 'real' is either a SET or a PARALLEL containing
15641 SETs (and possibly other stuff). In a PARALLEL, all the SETs
15642 are important so they all have to be marked RTX_FRAME_RELATED_P. */
15643
15644 if (GET_CODE (real) == SET)
15645 {
15646 rtx set = real;
f676971a 15647
9ebbca7d
GK
15648 temp = simplify_rtx (SET_SRC (set));
15649 if (temp)
15650 SET_SRC (set) = temp;
15651 temp = simplify_rtx (SET_DEST (set));
15652 if (temp)
15653 SET_DEST (set) = temp;
15654 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 15655 {
9ebbca7d
GK
15656 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15657 if (temp)
15658 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 15659 }
38c1f2d7 15660 }
37409796 15661 else
9ebbca7d
GK
15662 {
15663 int i;
37409796
NS
15664
15665 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
15666 for (i = 0; i < XVECLEN (real, 0); i++)
15667 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
15668 {
15669 rtx set = XVECEXP (real, 0, i);
f676971a 15670
9ebbca7d
GK
15671 temp = simplify_rtx (SET_SRC (set));
15672 if (temp)
15673 SET_SRC (set) = temp;
15674 temp = simplify_rtx (SET_DEST (set));
15675 if (temp)
15676 SET_DEST (set) = temp;
15677 if (GET_CODE (SET_DEST (set)) == MEM)
15678 {
15679 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15680 if (temp)
15681 XEXP (SET_DEST (set), 0) = temp;
15682 }
15683 RTX_FRAME_RELATED_P (set) = 1;
15684 }
15685 }
c19de7aa 15686
9ebbca7d 15687 RTX_FRAME_RELATED_P (insn) = 1;
bbbbb16a 15688 add_reg_note (insn, REG_FRAME_RELATED_EXPR, real);
38c1f2d7
MM
15689}
15690
00b960c7
AH
15691/* Returns an insn that has a vrsave set operation with the
15692 appropriate CLOBBERs. */
15693
15694static rtx
a2369ed3 15695generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
15696{
15697 int nclobs, i;
15698 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 15699 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 15700
a004eb82
AH
15701 clobs[0]
15702 = gen_rtx_SET (VOIDmode,
15703 vrsave,
15704 gen_rtx_UNSPEC_VOLATILE (SImode,
15705 gen_rtvec (2, reg, vrsave),
3aca4bff 15706 UNSPECV_SET_VRSAVE));
00b960c7
AH
15707
15708 nclobs = 1;
15709
9aa86737
AH
15710 /* We need to clobber the registers in the mask so the scheduler
15711 does not move sets to VRSAVE before sets of AltiVec registers.
15712
15713 However, if the function receives nonlocal gotos, reload will set
15714 all call saved registers live. We will end up with:
15715
15716 (set (reg 999) (mem))
15717 (parallel [ (set (reg vrsave) (unspec blah))
15718 (clobber (reg 999))])
15719
15720 The clobber will cause the store into reg 999 to be dead, and
15721 flow will attempt to delete an epilogue insn. In this case, we
15722 need an unspec use/set of the register. */
00b960c7
AH
15723
15724 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 15725 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
15726 {
15727 if (!epiloguep || call_used_regs [i])
15728 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15729 gen_rtx_REG (V4SImode, i));
15730 else
15731 {
15732 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
15733
15734 clobs[nclobs++]
a004eb82
AH
15735 = gen_rtx_SET (VOIDmode,
15736 reg,
15737 gen_rtx_UNSPEC (V4SImode,
15738 gen_rtvec (1, reg), 27));
9aa86737
AH
15739 }
15740 }
00b960c7
AH
15741
15742 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15743
15744 for (i = 0; i < nclobs; ++i)
15745 XVECEXP (insn, 0, i) = clobs[i];
15746
15747 return insn;
15748}
15749
89e7058f
AH
15750/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15751 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
15752
15753static void
f676971a 15754emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 15755 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
15756{
15757 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15758 rtx replacea, replaceb;
15759
15760 int_rtx = GEN_INT (offset);
15761
15762 /* Some cases that need register indexed addressing. */
15763 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4f011e1e 15764 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
15765 || (TARGET_SPE_ABI
15766 && SPE_VECTOR_MODE (mode)
15767 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
15768 {
15769 /* Whoever calls us must make sure r11 is available in the
c4ad648e 15770 flow path of instructions in the prologue. */
89e7058f
AH
15771 offset_rtx = gen_rtx_REG (Pmode, 11);
15772 emit_move_insn (offset_rtx, int_rtx);
15773
15774 replacea = offset_rtx;
15775 replaceb = int_rtx;
15776 }
15777 else
15778 {
15779 offset_rtx = int_rtx;
15780 replacea = NULL_RTX;
15781 replaceb = NULL_RTX;
15782 }
15783
15784 reg = gen_rtx_REG (mode, regno);
15785 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 15786 mem = gen_frame_mem (mode, addr);
89e7058f
AH
15787
15788 insn = emit_move_insn (mem, reg);
15789
15790 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15791}
15792
a3170dc6
AH
15793/* Emit an offset memory reference suitable for a frame store, while
15794 converting to a valid addressing mode. */
15795
15796static rtx
a2369ed3 15797gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
15798{
15799 rtx int_rtx, offset_rtx;
15800
15801 int_rtx = GEN_INT (offset);
15802
4d4cbc0e 15803 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
4f011e1e 15804 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
15805 {
15806 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15807 emit_move_insn (offset_rtx, int_rtx);
15808 }
15809 else
15810 offset_rtx = int_rtx;
15811
0be76840 15812 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
15813}
15814
6d0a8091
DJ
15815/* Look for user-defined global regs. We should not save and restore these,
15816 and cannot use stmw/lmw if there are any in the range they would cover. */
15817
15818static bool
f78c3290 15819no_global_regs_above (int first, bool gpr)
6d0a8091
DJ
15820{
15821 int i;
e1ece9f1 15822 for (i = first; i < (gpr ? 32 : 64); i++)
f78c3290 15823 if (global_regs[i])
6d0a8091
DJ
15824 return false;
15825 return true;
15826}
15827
699c914a
MS
15828#ifndef TARGET_FIX_AND_CONTINUE
15829#define TARGET_FIX_AND_CONTINUE 0
15830#endif
15831
f78c3290
NF
15832/* It's really GPR 13 and FPR 14, but we need the smaller of the two. */
15833#define FIRST_SAVRES_REGISTER FIRST_SAVED_GP_REGNO
15834#define LAST_SAVRES_REGISTER 31
15835#define N_SAVRES_REGISTERS (LAST_SAVRES_REGISTER - FIRST_SAVRES_REGISTER + 1)
15836
15837static GTY(()) rtx savres_routine_syms[N_SAVRES_REGISTERS][8];
15838
15839/* Return the symbol for an out-of-line register save/restore routine.
15840 We are saving/restoring GPRs if GPR is true. */
15841
15842static rtx
15843rs6000_savres_routine_sym (rs6000_stack_t *info, bool savep, bool gpr, bool exitp)
15844{
15845 int regno = gpr ? info->first_gp_reg_save : (info->first_fp_reg_save - 32);
15846 rtx sym;
15847 int select = ((savep ? 1 : 0) << 2
15848 | (gpr
15849 /* On the SPE, we never have any FPRs, but we do have
15850 32/64-bit versions of the routines. */
15851 ? (TARGET_SPE_ABI && info->spe_64bit_regs_used ? 1 : 0)
15852 : 0) << 1
15853 | (exitp ? 1: 0));
15854
15855 /* Don't generate bogus routine names. */
15856 gcc_assert (FIRST_SAVRES_REGISTER <= regno && regno <= LAST_SAVRES_REGISTER);
15857
15858 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select];
15859
15860 if (sym == NULL)
15861 {
15862 char name[30];
15863 const char *action;
15864 const char *regkind;
15865 const char *exit_suffix;
15866
15867 action = savep ? "save" : "rest";
15868
15869 /* SPE has slightly different names for its routines depending on
15870 whether we are saving 32-bit or 64-bit registers. */
15871 if (TARGET_SPE_ABI)
15872 {
15873 /* No floating point saves on the SPE. */
15874 gcc_assert (gpr);
15875
15876 regkind = info->spe_64bit_regs_used ? "64gpr" : "32gpr";
15877 }
15878 else
15879 regkind = gpr ? "gpr" : "fpr";
15880
15881 exit_suffix = exitp ? "_x" : "";
15882
15883 sprintf (name, "_%s%s_%d%s", action, regkind, regno, exit_suffix);
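 /* Examples of the names generated here: saving GPRs starting at
 r29 gives "_savegpr_29"; the exit variant restoring FPRs from
 f14 gives "_restfpr_14_x"; the 64-bit SPE GPR save starting at
 r20 gives "_save64gpr_20". */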
15884
15885 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select]
15886 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
15887 }
15888
15889 return sym;
15890}
15891
15892/* Emit a sequence of insns, including a stack tie if needed, for
15893 resetting the stack pointer. If SAVRES is true, then don't reset the
15894 stack pointer, but move the base of the frame into r11 for use by
15895 out-of-line register restore routines. */
15896
ff35822b 15897static rtx
f78c3290
NF
15898rs6000_emit_stack_reset (rs6000_stack_t *info,
15899 rtx sp_reg_rtx, rtx frame_reg_rtx,
15900 int sp_offset, bool savres)
15901{
15902 /* This blockage is needed so that sched doesn't decide to move
15903 the sp change before the register restores. */
15904 if (frame_reg_rtx != sp_reg_rtx
15905 || (TARGET_SPE_ABI
15906 && info->spe_64bit_regs_used != 0
15907 && info->first_gp_reg_save != 32))
15908 rs6000_emit_stack_tie ();
15909
15910 if (frame_reg_rtx != sp_reg_rtx)
15911 {
f78c3290 15912 if (sp_offset != 0)
ff35822b
JJ
15913 return emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
15914 GEN_INT (sp_offset)));
f78c3290 15915 else if (!savres)
ff35822b 15916 return emit_move_insn (sp_reg_rtx, frame_reg_rtx);
f78c3290
NF
15917 }
15918 else if (sp_offset != 0)
15919 {
15920 /* If we are restoring registers out-of-line, we will be using the
15921 "exit" variants of the restore routines, which will reset the
15922 stack for us. But we do need to point r11 into the right place
15923 for those routines. */
15924 rtx dest_reg = (savres
15925 ? gen_rtx_REG (Pmode, 11)
15926 : sp_reg_rtx);
15927
ff35822b
JJ
15928 rtx insn = emit_insn (gen_add3_insn (dest_reg, sp_reg_rtx,
15929 GEN_INT (sp_offset)));
15930 if (!savres)
15931 return insn;
f78c3290 15932 }
ff35822b 15933 return NULL_RTX;
f78c3290
NF
15934}
15935
15936/* Construct a parallel rtx describing the effect of a call to an
15937 out-of-line register save/restore routine. */
15938
15939static rtx
15940rs6000_make_savres_rtx (rs6000_stack_t *info,
15941 rtx frame_reg_rtx, int save_area_offset,
15942 enum machine_mode reg_mode,
15943 bool savep, bool gpr, bool exitp)
15944{
15945 int i;
15946 int offset, start_reg, end_reg, n_regs;
15947 int reg_size = GET_MODE_SIZE (reg_mode);
15948 rtx sym;
15949 rtvec p;
15950
15951 offset = 0;
15952 start_reg = (gpr
15953 ? info->first_gp_reg_save
15954 : info->first_fp_reg_save);
15955 end_reg = gpr ? 32 : 64;
15956 n_regs = end_reg - start_reg;
15957 p = rtvec_alloc ((exitp ? 4 : 3) + n_regs);
15958
15959 /* If we're saving registers, then we should never say we're exiting. */
15960 gcc_assert ((savep && !exitp) || !savep);
15961
15962 if (exitp)
15963 RTVEC_ELT (p, offset++) = gen_rtx_RETURN (VOIDmode);
15964
15965 RTVEC_ELT (p, offset++)
15966 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 65));
15967
15968 sym = rs6000_savres_routine_sym (info, savep, gpr, exitp);
15969 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, sym);
15970 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 11));
15971
15972 for (i = 0; i < end_reg - start_reg; i++)
15973 {
15974 rtx addr, reg, mem;
15975 reg = gen_rtx_REG (reg_mode, start_reg + i);
15976 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15977 GEN_INT (save_area_offset + reg_size*i));
15978 mem = gen_frame_mem (reg_mode, addr);
15979
15980 RTVEC_ELT (p, i + offset) = gen_rtx_SET (VOIDmode,
15981 savep ? mem : reg,
15982 savep ? reg : mem);
15983 }
15984
15985 return gen_rtx_PARALLEL (VOIDmode, p);
15986}
15987
52ff33d0
NF
15988/* Determine whether the gp REG is really used. */
15989
15990static bool
15991rs6000_reg_live_or_pic_offset_p (int reg)
15992{
6fb5fa3c 15993 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
15994 && (!call_used_regs[reg]
15995 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15996 && TARGET_TOC && TARGET_MINIMAL_TOC)))
15997 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15998 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
15999 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
16000}
16001
f78c3290
NF
16002enum {
16003 SAVRES_MULTIPLE = 0x1,
16004 SAVRES_INLINE_FPRS = 0x2,
16005 SAVRES_INLINE_GPRS = 0x4
16006};
16007
16008/* Determine the strategy for saving/restoring registers. */
16009
16010static int
16011rs6000_savres_strategy (rs6000_stack_t *info, bool savep,
16012 int using_static_chain_p, int sibcall)
16013{
16014 bool using_multiple_p;
16015 bool common;
16016 bool savres_fprs_inline;
16017 bool savres_gprs_inline;
16018 bool noclobber_global_gprs
16019 = no_global_regs_above (info->first_gp_reg_save, /*gpr=*/true);
16020
16021 using_multiple_p = (TARGET_MULTIPLE && ! TARGET_POWERPC64
16022 && (!TARGET_SPE_ABI
16023 || info->spe_64bit_regs_used == 0)
16024 && info->first_gp_reg_save < 31
16025 && noclobber_global_gprs);
16026 /* Don't bother to try to save things out-of-line if r11 is occupied
16027 by the static chain. It would require too much fiddling and the
16028 static chain is rarely used anyway. */
16029 common = (using_static_chain_p
16030 || sibcall
16031 || crtl->calls_eh_return
16032 || !info->lr_save_p
16033 || cfun->machine->ra_need_lr
16034 || info->total_size > 32767);
16035 savres_fprs_inline = (common
16036 || info->first_fp_reg_save == 64
16037 || !no_global_regs_above (info->first_fp_reg_save,
16038 /*gpr=*/false)
16039 || FP_SAVE_INLINE (info->first_fp_reg_save));
16040 savres_gprs_inline = (common
16041 /* Saving CR interferes with the exit routines
16042 used on the SPE, so just punt here. */
16043 || (!savep
16044 && TARGET_SPE_ABI
16045 && info->spe_64bit_regs_used != 0
16046 && info->cr_save_p != 0)
16047 || info->first_gp_reg_save == 32
16048 || !noclobber_global_gprs
16049 || GP_SAVE_INLINE (info->first_gp_reg_save));
16050
16051 if (savep)
16052 /* If we are going to use store multiple, then don't even bother
16053 with the out-of-line routines, since the store-multiple instruction
16054 will always be smaller. */
16055 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
16056 else
16057 {
16058 /* The situation is more complicated with load multiple. We'd
16059 prefer to use the out-of-line routines for restores, since the
16060 "exit" out-of-line routines can handle the restore of LR and
16061 the frame teardown. But we can only use the out-of-line
16062 routines if we know that we've used store multiple or
16063 out-of-line routines in the prologue, i.e. if we've saved all
16064 the registers from first_gp_reg_save. Otherwise, we risk
16065 loading garbage from the stack. Furthermore, we can only use
16066 the "exit" out-of-line gpr restore if we haven't saved any
16067 fprs. */
16068 bool saved_all = !savres_gprs_inline || using_multiple_p;
16069
16070 if (saved_all && info->first_fp_reg_save != 64)
16071 /* We can't use the exit routine; use load multiple if it's
16072 available. */
16073 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
16074 }
16075
16076 return (using_multiple_p
16077 | (savres_fprs_inline << 1)
16078 | (savres_gprs_inline << 2));
16079}
16080
9ebbca7d
GK
16081/* Emit function prologue as insns. */
16082
9878760c 16083void
863d938c 16084rs6000_emit_prologue (void)
9878760c 16085{
4697a36c 16086 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 16087 enum machine_mode reg_mode = Pmode;
327e5343 16088 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16089 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
16090 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
16091 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 16092 rtx cr_save_rtx = NULL_RTX;
9ebbca7d 16093 rtx insn;
f78c3290 16094 int strategy;
9ebbca7d 16095 int saving_FPRs_inline;
f78c3290 16096 int saving_GPRs_inline;
9ebbca7d 16097 int using_store_multiple;
f78c3290
NF
16098 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
16099 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
e1ece9f1 16100 && !call_used_regs[STATIC_CHAIN_REGNUM]);
9ebbca7d 16101 HOST_WIDE_INT sp_offset = 0;
f676971a 16102
699c914a
MS
16103 if (TARGET_FIX_AND_CONTINUE)
16104 {
16105 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 16106 address by modifying the first 5 instructions of the function
699c914a
MS
16107 to branch to the overriding function. This is necessary to
16108 permit function pointers that point to the old function to
16109 actually forward to the new function. */
16110 emit_insn (gen_nop ());
16111 emit_insn (gen_nop ());
de2ab0ca 16112 emit_insn (gen_nop ());
699c914a
MS
16113 emit_insn (gen_nop ());
16114 emit_insn (gen_nop ());
16115 }
16116
16117 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
16118 {
16119 reg_mode = V2SImode;
16120 reg_size = 8;
16121 }
a3170dc6 16122
f78c3290
NF
16123 strategy = rs6000_savres_strategy (info, /*savep=*/true,
16124 /*static_chain_p=*/using_static_chain_p,
16125 /*sibcall=*/0);
16126 using_store_multiple = strategy & SAVRES_MULTIPLE;
16127 saving_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16128 saving_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
9ebbca7d
GK
16129
16130 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
16131 if (! WORLD_SAVE_P (info)
16132 && info->push_p
acd0b319 16133 && (DEFAULT_ABI == ABI_V4
e3b5732b 16134 || crtl->calls_eh_return))
9ebbca7d 16135 {
f78c3290
NF
16136 bool need_r11 = (TARGET_SPE
16137 ? (!saving_GPRs_inline
16138 && info->spe_64bit_regs_used == 0)
16139 : (!saving_FPRs_inline || !saving_GPRs_inline));
9ebbca7d
GK
16140 if (info->total_size < 32767)
16141 sp_offset = info->total_size;
16142 else
f78c3290
NF
16143 frame_reg_rtx = (need_r11
16144 ? gen_rtx_REG (Pmode, 11)
16145 : frame_ptr_rtx);
f676971a 16146 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
16147 (frame_reg_rtx != sp_reg_rtx
16148 && (info->cr_save_p
16149 || info->lr_save_p
16150 || info->first_fp_reg_save < 64
16151 || info->first_gp_reg_save < 32
f78c3290
NF
16152 )),
16153 need_r11);
9ebbca7d
GK
16154 if (frame_reg_rtx != sp_reg_rtx)
16155 rs6000_emit_stack_tie ();
16156 }
16157
d62294f5 16158 /* Handle world saves specially here. */
f57fe068 16159 if (WORLD_SAVE_P (info))
d62294f5
FJ
16160 {
16161 int i, j, sz;
16162 rtx treg;
16163 rtvec p;
22fa69da 16164 rtx reg0;
d62294f5
FJ
16165
16166 /* save_world expects lr in r0. */
22fa69da 16167 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 16168 if (info->lr_save_p)
c4ad648e 16169 {
22fa69da 16170 insn = emit_move_insn (reg0,
1de43f85 16171 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
16172 RTX_FRAME_RELATED_P (insn) = 1;
16173 }
d62294f5
FJ
16174
16175 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 16176 assumptions about the offsets of various bits of the stack
992d08b1 16177 frame. */
37409796
NS
16178 gcc_assert (info->gp_save_offset == -220
16179 && info->fp_save_offset == -144
16180 && info->lr_save_offset == 8
16181 && info->cr_save_offset == 4
16182 && info->push_p
16183 && info->lr_save_p
e3b5732b 16184 && (!crtl->calls_eh_return
37409796
NS
16185 || info->ehrd_offset == -432)
16186 && info->vrsave_save_offset == -224
22fa69da 16187 && info->altivec_save_offset == -416);
d62294f5
FJ
16188
16189 treg = gen_rtx_REG (SImode, 11);
16190 emit_move_insn (treg, GEN_INT (-info->total_size));
16191
16192 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 16193 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
16194
16195 /* Preserve CR2 for save_world prologues */
22fa69da 16196 sz = 5;
d62294f5
FJ
16197 sz += 32 - info->first_gp_reg_save;
16198 sz += 64 - info->first_fp_reg_save;
16199 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
16200 p = rtvec_alloc (sz);
16201 j = 0;
16202 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 16203 gen_rtx_REG (SImode,
1de43f85 16204 LR_REGNO));
d62294f5 16205 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
16206 gen_rtx_SYMBOL_REF (Pmode,
16207 "*save_world"));
d62294f5 16208 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
16209 properly. */
16210 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16211 {
696e45ba
ME
16212 rtx reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16213 ? DFmode : SFmode),
16214 info->first_fp_reg_save + i);
c4ad648e
AM
16215 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16216 GEN_INT (info->fp_save_offset
16217 + sp_offset + 8 * i));
696e45ba
ME
16218 rtx mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16219 ? DFmode : SFmode), addr);
c4ad648e
AM
16220
16221 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16222 }
d62294f5 16223 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16224 {
16225 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16226 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16227 GEN_INT (info->altivec_save_offset
16228 + sp_offset + 16 * i));
0be76840 16229 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16230
16231 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16232 }
d62294f5 16233 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16234 {
16235 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16236 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16237 GEN_INT (info->gp_save_offset
16238 + sp_offset + reg_size * i));
0be76840 16239 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16240
16241 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16242 }
16243
16244 {
16245 /* CR register traditionally saved as CR2. */
16246 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16247 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16248 GEN_INT (info->cr_save_offset
16249 + sp_offset));
0be76840 16250 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16251
16252 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16253 }
22fa69da
GK
16254 /* Explain about use of R0. */
16255 if (info->lr_save_p)
16256 {
16257 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16258 GEN_INT (info->lr_save_offset
16259 + sp_offset));
16260 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 16261
22fa69da
GK
16262 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
16263 }
16264 /* Explain what happens to the stack pointer. */
16265 {
16266 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
16267 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
16268 }
d62294f5
FJ
16269
16270 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16271 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
16272 treg, GEN_INT (-info->total_size));
16273 sp_offset = info->total_size;
d62294f5
FJ
16274 }
16275
9ebbca7d 16276 /* If we use the link register, get it into r0. */
f57fe068 16277 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 16278 {
52ff33d0
NF
16279 rtx addr, reg, mem;
16280
f8a57be8 16281 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 16282 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 16283 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
16284
16285 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16286 GEN_INT (info->lr_save_offset + sp_offset));
16287 reg = gen_rtx_REG (Pmode, 0);
16288 mem = gen_rtx_MEM (Pmode, addr);
16289 /* This should not be of rs6000_sr_alias_set, because of
16290 __builtin_return_address. */
16291
16292 insn = emit_move_insn (mem, reg);
16293 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16294 NULL_RTX, NULL_RTX);
f8a57be8 16295 }
9ebbca7d
GK
16296
16297 /* If we need to save CR, put it into r12. */
f57fe068 16298 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 16299 {
f8a57be8 16300 rtx set;
f676971a 16301
9ebbca7d 16302 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
16303 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16304 RTX_FRAME_RELATED_P (insn) = 1;
16305 /* Now, there's no way that dwarf2out_frame_debug_expr is going
16306 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
16307 But that's OK. All we have to do is specify that _one_ condition
16308 code register is saved in this stack slot. The thrower's epilogue
16309 will then restore all the call-saved registers.
16310 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
16311 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
16312 gen_rtx_REG (SImode, CR2_REGNO));
bbbbb16a 16313 add_reg_note (insn, REG_FRAME_RELATED_EXPR, set);
9ebbca7d
GK
16314 }
16315
a4f6c312
SS
16316 /* Do any required saving of fpr's. If only one or two to save, do
16317 it ourselves. Otherwise, call function. */
f57fe068 16318 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
16319 {
16320 int i;
16321 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16322 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 16323 && ! call_used_regs[info->first_fp_reg_save+i]))
696e45ba
ME
16324 emit_frame_save (frame_reg_rtx, frame_ptr_rtx,
16325 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16326 ? DFmode : SFmode,
89e7058f
AH
16327 info->first_fp_reg_save + i,
16328 info->fp_save_offset + sp_offset + 8 * i,
16329 info->total_size);
9ebbca7d 16330 }
f57fe068 16331 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
16332 {
16333 rtx par;
16334
16335 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16336 info->fp_save_offset + sp_offset,
16337 DFmode,
16338 /*savep=*/true, /*gpr=*/false,
16339 /*exitp=*/false);
16340 insn = emit_insn (par);
16341 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16342 NULL_RTX, NULL_RTX);
16343 }
16344
16345 /* Save GPRs. This is done as a PARALLEL if we are using
16346 the store-multiple instructions. */
16347 if (!WORLD_SAVE_P (info)
16348 && TARGET_SPE_ABI
16349 && info->spe_64bit_regs_used != 0
16350 && info->first_gp_reg_save != 32)
16351 {
16352 int i;
f78c3290
NF
16353 rtx spe_save_area_ptr;
16354
16355 /* Determine whether we can address all of the registers that need
16356 to be saved with an offset from the stack pointer that fits in
16357 the small const field for SPE memory instructions. */
16358 int spe_regs_addressable_via_sp
16359 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16360 + (32 - info->first_gp_reg_save - 1) * reg_size)
16361 && saving_GPRs_inline);
16362 int spe_offset;
16363
16364 if (spe_regs_addressable_via_sp)
16365 {
16366 spe_save_area_ptr = frame_reg_rtx;
16367 spe_offset = info->spe_gp_save_offset + sp_offset;
16368 }
16369 else
16370 {
16371 /* Make r11 point to the start of the SPE save area. We need
16372 to be careful here if r11 is holding the static chain. If
16373 it is, then temporarily save it in r0. We would use r0 as
16374 our base register here, but using r0 as a base register in
16375 loads and stores means something different from what we
16376 would like. */
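	  /* Editorial aside, not in the original source: r0 cannot be the
	     base register here because in PowerPC loads, stores and addi
	     the RA field is read as the literal value 0 when it names r0,
	     not as the contents of r0, so an r0-based address would drop
	     the pointer entirely.  */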
16377 int ool_adjust = (saving_GPRs_inline
16378 ? 0
16379 : (info->first_gp_reg_save
16380 - (FIRST_SAVRES_REGISTER+1))*8);
16381 HOST_WIDE_INT offset = (info->spe_gp_save_offset
16382 + sp_offset - ool_adjust);
16383
16384 if (using_static_chain_p)
16385 {
16386 rtx r0 = gen_rtx_REG (Pmode, 0);
16387 gcc_assert (info->first_gp_reg_save > 11);
16388
16389 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
16390 }
16391
16392 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
16393 insn = emit_insn (gen_addsi3 (spe_save_area_ptr,
16394 frame_reg_rtx,
16395 GEN_INT (offset)));
16396 /* We need to make sure the move to r11 gets noted for
16397 properly outputting unwind information. */
16398 if (!saving_GPRs_inline)
16399 rs6000_frame_related (insn, frame_reg_rtx, offset,
16400 NULL_RTX, NULL_RTX);
16401 spe_offset = 0;
16402 }
16403
16404 if (saving_GPRs_inline)
16405 {
16406 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16407 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16408 {
16409 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16410 rtx offset, addr, mem;
f676971a 16411
f78c3290
NF
16412 /* We're doing all this to ensure that the offset fits into
 16413 the immediate offset field of 'evstdd'. */
16414 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
16415
16416 offset = GEN_INT (reg_size * i + spe_offset);
16417 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
16418 mem = gen_rtx_MEM (V2SImode, addr);
16419
16420 insn = emit_move_insn (mem, reg);
16421
16422 rs6000_frame_related (insn, spe_save_area_ptr,
16423 info->spe_gp_save_offset
16424 + sp_offset + reg_size * i,
16425 offset, const0_rtx);
16426 }
16427 }
16428 else
9ebbca7d 16429 {
f78c3290 16430 rtx par;
9ebbca7d 16431
f78c3290
NF
16432 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
16433 0, reg_mode,
16434 /*savep=*/true, /*gpr=*/true,
16435 /*exitp=*/false);
16436 insn = emit_insn (par);
16437 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16438 NULL_RTX, NULL_RTX);
9ebbca7d 16439 }
f78c3290
NF
16440
16441
16442 /* Move the static chain pointer back. */
16443 if (using_static_chain_p && !spe_regs_addressable_via_sp)
16444 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
16445 }
16446 else if (!WORLD_SAVE_P (info) && !saving_GPRs_inline)
16447 {
16448 rtx par;
16449
16450 /* Need to adjust r11 if we saved any FPRs. */
16451 if (info->first_fp_reg_save != 64)
16452 {
16453 rtx r11 = gen_rtx_REG (reg_mode, 11);
16454 rtx offset = GEN_INT (info->total_size
16455 + (-8 * (64-info->first_fp_reg_save)));
16456 rtx ptr_reg = (sp_reg_rtx == frame_reg_rtx
16457 ? sp_reg_rtx : r11);
16458
16459 emit_insn (TARGET_32BIT
16460 ? gen_addsi3 (r11, ptr_reg, offset)
16461 : gen_adddi3 (r11, ptr_reg, offset));
16462 }
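      /* Illustrative note (added commentary, not in the original): if only
	 f29..f31 are saved, first_fp_reg_save is 61 and the offset above is
	 total_size - 24, so r11 ends up pointing just below the 24-byte FPR
	 save area at the top of the frame, which appears to be where the
	 out-of-line GPR save routine expects its base register.  */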
16463
16464 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16465 info->gp_save_offset + sp_offset,
16466 reg_mode,
16467 /*savep=*/true, /*gpr=*/true,
16468 /*exitp=*/false);
16469 insn = emit_insn (par);
f676971a 16470 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16471 NULL_RTX, NULL_RTX);
16472 }
f78c3290 16473 else if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 16474 {
308c142a 16475 rtvec p;
9ebbca7d
GK
16476 int i;
16477 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
16478 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16479 {
16480 rtx addr, reg, mem;
16481 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
16482 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16483 GEN_INT (info->gp_save_offset
16484 + sp_offset
9ebbca7d 16485 + reg_size * i));
0be76840 16486 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
16487
16488 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
16489 }
16490 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 16491 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 16492 NULL_RTX, NULL_RTX);
b6c9286a 16493 }
f57fe068 16494 else if (!WORLD_SAVE_P (info))
b6c9286a 16495 {
9ebbca7d
GK
16496 int i;
16497 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16498 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16499 {
16500 rtx addr, reg, mem;
16501 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 16502
52ff33d0
NF
16503 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16504 GEN_INT (info->gp_save_offset
16505 + sp_offset
16506 + reg_size * i));
16507 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 16508
52ff33d0
NF
16509 insn = emit_move_insn (mem, reg);
16510 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16511 NULL_RTX, NULL_RTX);
16512 }
9ebbca7d
GK
16513 }
16514
83720594
RH
16515 /* ??? There's no need to emit actual instructions here, but it's the
16516 easiest way to get the frame unwind information emitted. */
e3b5732b 16517 if (crtl->calls_eh_return)
83720594 16518 {
78e1b90d
DE
16519 unsigned int i, regno;
16520
fc4767bb
JJ
16521 /* In AIX ABI we need to pretend we save r2 here. */
16522 if (TARGET_AIX)
16523 {
16524 rtx addr, reg, mem;
16525
16526 reg = gen_rtx_REG (reg_mode, 2);
16527 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16528 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16529 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16530
16531 insn = emit_move_insn (mem, reg);
f676971a 16532 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16533 NULL_RTX, NULL_RTX);
16534 PATTERN (insn) = gen_blockage ();
16535 }
16536
83720594
RH
16537 for (i = 0; ; ++i)
16538 {
83720594
RH
16539 regno = EH_RETURN_DATA_REGNO (i);
16540 if (regno == INVALID_REGNUM)
16541 break;
16542
89e7058f
AH
16543 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
16544 info->ehrd_offset + sp_offset
16545 + reg_size * (int) i,
16546 info->total_size);
83720594
RH
16547 }
16548 }
16549
9ebbca7d 16550 /* Save CR if we use any that must be preserved. */
f57fe068 16551 if (!WORLD_SAVE_P (info) && info->cr_save_p)
16552 {
16553 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16554 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16555 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
16556 /* See the large comment above about why CR2_REGNO is used. */
16557 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 16558
9ebbca7d
GK
16559 /* If r12 was used to hold the original sp, copy cr into r0 now
16560 that it's free. */
16561 if (REGNO (frame_reg_rtx) == 12)
16562 {
f8a57be8
GK
16563 rtx set;
16564
9ebbca7d 16565 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
16566 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16567 RTX_FRAME_RELATED_P (insn) = 1;
16568 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
bbbbb16a 16569 add_reg_note (insn, REG_FRAME_RELATED_EXPR, set);
9ebbca7d
GK
16570 }
16571 insn = emit_move_insn (mem, cr_save_rtx);
16572
f676971a 16573 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 16574 NULL_RTX, NULL_RTX);
9ebbca7d
GK
16575 }
16576
f676971a 16577 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 16578 for which it was done previously. */
f57fe068 16579 if (!WORLD_SAVE_P (info) && info->push_p
e3b5732b 16580 && !(DEFAULT_ABI == ABI_V4 || crtl->calls_eh_return))
2b2c2fe5 16581 {
bcb2d701 16582 if (info->total_size < 32767)
2b2c2fe5 16583 sp_offset = info->total_size;
16584 else
16585 frame_reg_rtx = frame_ptr_rtx;
16586 rs6000_emit_allocate_stack (info->total_size,
16587 (frame_reg_rtx != sp_reg_rtx
16588 && ((info->altivec_size != 0)
16589 || (info->vrsave_mask != 0)
16590 )),
16591 FALSE);
bcb2d701
EC
16592 if (frame_reg_rtx != sp_reg_rtx)
16593 rs6000_emit_stack_tie ();
2b2c2fe5 16594 }
9ebbca7d
GK
16595
16596 /* Set frame pointer, if needed. */
16597 if (frame_pointer_needed)
16598 {
7d5175e1 16599 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
16600 sp_reg_rtx);
16601 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 16602 }
9878760c 16603
2b2c2fe5
EC
16604 /* Save AltiVec registers if needed. Save here because the red zone does
16605 not include AltiVec registers. */
16606 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16607 {
16608 int i;
16609
 16610 /* There should be a non-inline version of this, for when we
16611 are saving lots of vector registers. */
16612 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16613 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16614 {
16615 rtx areg, savereg, mem;
16616 int offset;
16617
16618 offset = info->altivec_save_offset + sp_offset
16619 + 16 * (i - info->first_altivec_reg_save);
16620
16621 savereg = gen_rtx_REG (V4SImode, i);
16622
16623 areg = gen_rtx_REG (Pmode, 0);
16624 emit_move_insn (areg, GEN_INT (offset));
16625
16626 /* AltiVec addressing mode is [reg+reg]. */
16627 mem = gen_frame_mem (V4SImode,
16628 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
16629
16630 insn = emit_move_insn (mem, savereg);
16631
16632 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16633 areg, GEN_INT (offset));
16634 }
16635 }
16636
16637 /* VRSAVE is a bit vector representing which AltiVec registers
16638 are used. The OS uses this to determine which vector
16639 registers to save on a context switch. We need to save
16640 VRSAVE on the stack frame, add whatever AltiVec registers we
16641 used in this function, and do the corresponding magic in the
16642 epilogue. */
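  /* Added illustrative note (not part of the original source): assuming
     ALTIVEC_REG_BIT maps v<n> to the bit 0x80000000 >> n in VRSAVE, a
     function that uses only v20 and v21 ORs 0x00000c00 into the saved
     VRSAVE value below.  */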
16643
16644 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16645 && info->vrsave_mask != 0)
16646 {
16647 rtx reg, mem, vrsave;
16648 int offset;
16649
16650 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
16651 as frame_reg_rtx and r11 as the static chain pointer for
16652 nested functions. */
16653 reg = gen_rtx_REG (SImode, 0);
16654 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
16655 if (TARGET_MACHO)
16656 emit_insn (gen_get_vrsave_internal (reg));
16657 else
16658 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
16659
16660 if (!WORLD_SAVE_P (info))
16661 {
16662 /* Save VRSAVE. */
16663 offset = info->vrsave_save_offset + sp_offset;
16664 mem = gen_frame_mem (SImode,
16665 gen_rtx_PLUS (Pmode, frame_reg_rtx,
16666 GEN_INT (offset)));
16667 insn = emit_move_insn (mem, reg);
16668 }
16669
16670 /* Include the registers in the mask. */
16671 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
16672
16673 insn = emit_insn (generate_set_vrsave (reg, info, 0));
16674 }
16675
1db02437 16676 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 16677 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
16678 || (DEFAULT_ABI == ABI_V4
16679 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 16680 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
16681 {
16682 /* If emit_load_toc_table will use the link register, we need to save
16683 it. We use R12 for this purpose because emit_load_toc_table
16684 can use register 0. This allows us to use a plain 'blr' to return
16685 from the procedure more often. */
16686 int save_LR_around_toc_setup = (TARGET_ELF
16687 && DEFAULT_ABI != ABI_AIX
16688 && flag_pic
16689 && ! info->lr_save_p
16690 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
16691 if (save_LR_around_toc_setup)
16692 {
1de43f85 16693 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 16694
c4ad648e 16695 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 16696 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 16697
c4ad648e 16698 rs6000_emit_load_toc_table (TRUE);
f8a57be8 16699
c4ad648e 16700 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
16701 RTX_FRAME_RELATED_P (insn) = 1;
16702 }
16703 else
16704 rs6000_emit_load_toc_table (TRUE);
16705 }
ee890fe2 16706
fcce224d 16707#if TARGET_MACHO
ee890fe2 16708 if (DEFAULT_ABI == ABI_DARWIN
e3b5732b 16709 && flag_pic && crtl->uses_pic_offset_table)
ee890fe2 16710 {
1de43f85 16711 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
08a6a74b 16712 rtx src = gen_rtx_SYMBOL_REF (Pmode, MACHOPIC_FUNCTION_BASE_NAME);
ee890fe2 16713
6d0a8091
DJ
16714 /* Save and restore LR locally around this call (in R0). */
16715 if (!info->lr_save_p)
6fb5fa3c 16716 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 16717
6fb5fa3c 16718 emit_insn (gen_load_macho_picbase (src));
ee890fe2 16719
16720 emit_move_insn (gen_rtx_REG (Pmode,
16721 RS6000_PIC_OFFSET_TABLE_REGNUM),
16722 lr);
6d0a8091
DJ
16723
16724 if (!info->lr_save_p)
6fb5fa3c 16725 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 16726 }
fcce224d 16727#endif
9ebbca7d
GK
16728}
16729
9ebbca7d 16730/* Write function prologue. */
a4f6c312 16731
08c148a8 16732static void
f676971a 16733rs6000_output_function_prologue (FILE *file,
a2369ed3 16734 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
16735{
16736 rs6000_stack_t *info = rs6000_stack_info ();
16737
4697a36c
MM
16738 if (TARGET_DEBUG_STACK)
16739 debug_stack_info (info);
9878760c 16740
a4f6c312
SS
16741 /* Write .extern for any function we will call to save and restore
16742 fp values. */
16743 if (info->first_fp_reg_save < 64
16744 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 16745 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 16746 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
f78c3290 16747 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9878760c 16748
c764f757
RK
16749 /* Write .extern for AIX common mode routines, if needed. */
16750 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
16751 {
f6709c70
JW
16752 fputs ("\t.extern __mulh\n", file);
16753 fputs ("\t.extern __mull\n", file);
16754 fputs ("\t.extern __divss\n", file);
16755 fputs ("\t.extern __divus\n", file);
16756 fputs ("\t.extern __quoss\n", file);
16757 fputs ("\t.extern __quous\n", file);
c764f757
RK
16758 common_mode_defined = 1;
16759 }
9878760c 16760
9ebbca7d 16761 if (! HAVE_prologue)
979721f8 16762 {
9ebbca7d 16763 start_sequence ();
9dda4cc8 16764
a4f6c312
SS
16765 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
16766 the "toplevel" insn chain. */
2e040219 16767 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16768 rs6000_emit_prologue ();
2e040219 16769 emit_note (NOTE_INSN_DELETED);
178c3eff 16770
a3c9585f 16771 /* Expand INSN_ADDRESSES so final() doesn't crash. */
16772 {
16773 rtx insn;
16774 unsigned addr = 0;
16775 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16776 {
16777 INSN_ADDRESSES_NEW (insn, addr);
16778 addr += 4;
16779 }
16780 }
9dda4cc8 16781
9ebbca7d 16782 if (TARGET_DEBUG_STACK)
a4f6c312 16783 debug_rtx_list (get_insns (), 100);
c9d691e9 16784 final (get_insns (), file, FALSE);
9ebbca7d 16785 end_sequence ();
979721f8
MM
16786 }
16787
9ebbca7d
GK
16788 rs6000_pic_labelno++;
16789}
f676971a 16790
1c9c5e43
AM
16791/* Non-zero if vmx regs are restored before the frame pop, zero if
16792 we restore after the pop when possible. */
16793#define ALWAYS_RESTORE_ALTIVEC_BEFORE_POP 0
16794
f78c3290
NF
16795/* Reload CR from REG. */
16796
16797static void
16798rs6000_restore_saved_cr (rtx reg, int using_mfcr_multiple)
16799{
16800 int count = 0;
16801 int i;
16802
16803 if (using_mfcr_multiple)
16804 {
16805 for (i = 0; i < 8; i++)
16806 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16807 count++;
16808 gcc_assert (count);
16809 }
16810
16811 if (using_mfcr_multiple && count > 1)
16812 {
16813 rtvec p;
16814 int ndx;
16815
16816 p = rtvec_alloc (count);
16817
16818 ndx = 0;
16819 for (i = 0; i < 8; i++)
16820 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16821 {
16822 rtvec r = rtvec_alloc (2);
16823 RTVEC_ELT (r, 0) = reg;
16824 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
16825 RTVEC_ELT (p, ndx) =
16826 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
16827 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
16828 ndx++;
16829 }
16830 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16831 gcc_assert (ndx == count);
16832 }
16833 else
16834 for (i = 0; i < 8; i++)
16835 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16836 {
16837 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
16838 CR0_REGNO+i),
16839 reg));
16840 }
16841}
16842
16843/* Return true if OFFSET from stack pointer can be clobbered by signals.
 16844 V.4 doesn't have any stack cushion; the AIX ABIs have 220 or 288 bytes
 16845 below the stack pointer that are not clobbered by signals. */
9878760c 16846
16847static inline bool
16848offset_below_red_zone_p (HOST_WIDE_INT offset)
16849{
16850 return offset < (DEFAULT_ABI == ABI_V4
16851 ? 0
16852 : TARGET_32BIT ? -220 : -288);
16853}
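/* Added worked example (editorial, not in the original source): with the
   288-byte 64-bit red zone, offset_below_red_zone_p (-300) is true, so a
   register saved 300 bytes below the stack pointer must be reloaded before
   the frame is popped, while offset_below_red_zone_p (-100) is false since
   that slot stays inside the protected area.  Under ABI_V4 there is no red
   zone, so any offset below the stack pointer counts as clobberable.  */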
16854
16855/* Emit function epilogue as insns. */
9878760c 16856
9ebbca7d 16857void
a2369ed3 16858rs6000_emit_epilogue (int sibcall)
16859{
16860 rs6000_stack_t *info;
f78c3290 16861 int restoring_GPRs_inline;
9ebbca7d
GK
16862 int restoring_FPRs_inline;
16863 int using_load_multiple;
d296e02e 16864 int using_mtcr_multiple;
9ebbca7d 16865 int use_backchain_to_restore_sp;
f78c3290
NF
16866 int restore_lr;
16867 int strategy;
9ebbca7d
GK
16868 int sp_offset = 0;
16869 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
16870 rtx frame_reg_rtx = sp_reg_rtx;
ff35822b
JJ
16871 rtx cfa_restores = NULL_RTX;
16872 rtx insn;
0e67400a 16873 enum machine_mode reg_mode = Pmode;
327e5343 16874 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16875 int i;
16876
c19de7aa
AH
16877 info = rs6000_stack_info ();
16878
16879 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
16880 {
16881 reg_mode = V2SImode;
16882 reg_size = 8;
16883 }
16884
f78c3290
NF
16885 strategy = rs6000_savres_strategy (info, /*savep=*/false,
16886 /*static_chain_p=*/0, sibcall);
16887 using_load_multiple = strategy & SAVRES_MULTIPLE;
16888 restoring_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16889 restoring_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
d296e02e 16890 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
16891 || rs6000_cpu == PROCESSOR_PPC603
16892 || rs6000_cpu == PROCESSOR_PPC750
16893 || optimize_size);
1c9c5e43
AM
16894 /* Restore via the backchain when we have a large frame, since this
16895 is more efficient than an addis, addi pair. The second condition
 16896 here will not trigger at the moment; we don't actually need a
16897 frame pointer for alloca, but the generic parts of the compiler
16898 give us one anyway. */
16899 use_backchain_to_restore_sp = (info->total_size > 32767
16900 || info->total_size
16901 + (info->lr_save_p ? info->lr_save_offset : 0)
16902 > 32767
16903 || (cfun->calls_alloca
16904 && !frame_pointer_needed));
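  /* Added note (editorial): the 32767 tests above reflect the reach of a
     single signed 16-bit displacement; a 40000-byte frame, for example,
     cannot be popped with one addi, so reloading the saved back chain at
     0(r1) is cheaper than building the constant with an addis/addi pair.  */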
16905 restore_lr = (info->lr_save_p
16906 && restoring_GPRs_inline
16907 && restoring_FPRs_inline);
9ebbca7d 16908
f57fe068 16909 if (WORLD_SAVE_P (info))
d62294f5
FJ
16910 {
16911 int i, j;
16912 char rname[30];
16913 const char *alloc_rname;
16914 rtvec p;
16915
16916 /* eh_rest_world_r10 will return to the location saved in the LR
 16917 stack slot (which is not likely to be our caller).
16918 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
16919 rest_world is similar, except any R10 parameter is ignored.
16920 The exception-handling stuff that was here in 2.95 is no
16921 longer necessary. */
d62294f5
FJ
16922
16923 p = rtvec_alloc (9
16924 + 1
f676971a 16925 + 32 - info->first_gp_reg_save
16926 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
16927 + 63 + 1 - info->first_fp_reg_save);
d62294f5 16928
e3b5732b 16929 strcpy (rname, ((crtl->calls_eh_return) ?
c4ad648e 16930 "*eh_rest_world_r10" : "*rest_world"));
16931 alloc_rname = ggc_strdup (rname);
16932
16933 j = 0;
16934 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
16935 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 16936 gen_rtx_REG (Pmode,
1de43f85 16937 LR_REGNO));
d62294f5 16938 RTVEC_ELT (p, j++)
c4ad648e 16939 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 16940 /* The instruction pattern requires a clobber here;
c4ad648e 16941 it is shared with the restVEC helper. */
d62294f5 16942 RTVEC_ELT (p, j++)
c4ad648e 16943 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
16944
16945 {
16946 /* CR register traditionally saved as CR2. */
16947 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16948 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16949 GEN_INT (info->cr_save_offset));
0be76840 16950 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16951
16952 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
16953 }
16954
16955 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16956 {
16957 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16958 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16959 GEN_INT (info->gp_save_offset
16960 + reg_size * i));
0be76840 16961 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16962
16963 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16964 }
d62294f5 16965 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16966 {
16967 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16968 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16969 GEN_INT (info->altivec_save_offset
16970 + 16 * i));
0be76840 16971 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16972
16973 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16974 }
d62294f5 16975 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e 16976 {
696e45ba
ME
16977 rtx reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16978 ? DFmode : SFmode),
16979 info->first_fp_reg_save + i);
c4ad648e
AM
16980 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16981 GEN_INT (info->fp_save_offset
16982 + 8 * i));
696e45ba
ME
16983 rtx mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16984 ? DFmode : SFmode), addr);
c4ad648e
AM
16985
16986 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16987 }
d62294f5 16988 RTVEC_ELT (p, j++)
c4ad648e 16989 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 16990 RTVEC_ELT (p, j++)
c4ad648e 16991 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 16992 RTVEC_ELT (p, j++)
c4ad648e 16993 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 16994 RTVEC_ELT (p, j++)
c4ad648e 16995 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 16996 RTVEC_ELT (p, j++)
c4ad648e 16997 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
16998 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
16999
17000 return;
17001 }
17002
45b194f8
AM
17003 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
17004 if (info->push_p)
2b2c2fe5 17005 sp_offset = info->total_size;
f676971a 17006
e6477eaa
AM
17007 /* Restore AltiVec registers if we must do so before adjusting the
17008 stack. */
17009 if (TARGET_ALTIVEC_ABI
17010 && info->altivec_size != 0
17011 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17012 || (DEFAULT_ABI != ABI_V4
ff35822b 17013 && offset_below_red_zone_p (info->altivec_save_offset))))
17014 {
17015 int i;
17016
e6477eaa
AM
17017 if (use_backchain_to_restore_sp)
17018 {
17019 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17020 emit_move_insn (frame_reg_rtx,
17021 gen_rtx_MEM (Pmode, sp_reg_rtx));
17022 sp_offset = 0;
17023 }
17024 else if (frame_pointer_needed)
17025 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa 17026
9aa86737
AH
17027 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
17028 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
17029 {
ff35822b 17030 rtx addr, areg, mem, reg;
9aa86737
AH
17031
17032 areg = gen_rtx_REG (Pmode, 0);
17033 emit_move_insn
17034 (areg, GEN_INT (info->altivec_save_offset
17035 + sp_offset
17036 + 16 * (i - info->first_altivec_reg_save)));
17037
17038 /* AltiVec addressing mode is [reg+reg]. */
17039 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 17040 mem = gen_frame_mem (V4SImode, addr);
9aa86737 17041
ff35822b
JJ
17042 reg = gen_rtx_REG (V4SImode, i);
17043 emit_move_insn (reg, mem);
17044 if (offset_below_red_zone_p (info->altivec_save_offset
17045 + (i - info->first_altivec_reg_save)
17046 * 16))
17047 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17048 cfa_restores);
9aa86737
AH
17049 }
17050 }
17051
e6477eaa
AM
17052 /* Restore VRSAVE if we must do so before adjusting the stack. */
17053 if (TARGET_ALTIVEC
17054 && TARGET_ALTIVEC_VRSAVE
17055 && info->vrsave_mask != 0
17056 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17057 || (DEFAULT_ABI != ABI_V4
ff35822b 17058 && offset_below_red_zone_p (info->vrsave_save_offset))))
17059 {
17060 rtx addr, mem, reg;
17061
1c9c5e43 17062 if (frame_reg_rtx == sp_reg_rtx)
e6477eaa 17063 {
1c9c5e43
AM
17064 if (use_backchain_to_restore_sp)
17065 {
17066 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17067 emit_move_insn (frame_reg_rtx,
17068 gen_rtx_MEM (Pmode, sp_reg_rtx));
17069 sp_offset = 0;
17070 }
17071 else if (frame_pointer_needed)
17072 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa
AM
17073 }
17074
17075 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17076 GEN_INT (info->vrsave_save_offset + sp_offset));
17077 mem = gen_frame_mem (SImode, addr);
17078 reg = gen_rtx_REG (SImode, 12);
17079 emit_move_insn (reg, mem);
17080
17081 emit_insn (generate_set_vrsave (reg, info, 1));
17082 }
17083
ff35822b 17084 insn = NULL_RTX;
1c9c5e43
AM
17085 /* If we have a large stack frame, restore the old stack pointer
17086 using the backchain. */
17087 if (use_backchain_to_restore_sp)
17088 {
1c9c5e43 17089 if (frame_reg_rtx == sp_reg_rtx)
17090 {
17091 /* Under V.4, don't reset the stack pointer until after we're done
17092 loading the saved registers. */
17093 if (DEFAULT_ABI == ABI_V4)
17094 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17095
ff35822b
JJ
17096 insn = emit_move_insn (frame_reg_rtx,
17097 gen_rtx_MEM (Pmode, sp_reg_rtx));
e6477eaa
AM
17098 sp_offset = 0;
17099 }
17100 else if (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17101 && DEFAULT_ABI == ABI_V4)
17102 /* frame_reg_rtx has been set up by the altivec restore. */
17103 ;
17104 else
17105 {
ff35822b 17106 insn = emit_move_insn (sp_reg_rtx, frame_reg_rtx);
1c9c5e43
AM
17107 frame_reg_rtx = sp_reg_rtx;
17108 }
17109 }
17110 /* If we have a frame pointer, we can restore the old stack pointer
17111 from it. */
17112 else if (frame_pointer_needed)
17113 {
17114 frame_reg_rtx = sp_reg_rtx;
17115 if (DEFAULT_ABI == ABI_V4)
17116 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17117
ff35822b
JJ
17118 insn = emit_insn (gen_add3_insn (frame_reg_rtx, hard_frame_pointer_rtx,
17119 GEN_INT (info->total_size)));
1c9c5e43 17120 sp_offset = 0;
2b2c2fe5 17121 }
17122 else if (info->push_p
17123 && DEFAULT_ABI != ABI_V4
e3b5732b 17124 && !crtl->calls_eh_return)
2b2c2fe5 17125 {
ff35822b
JJ
17126 insn = emit_insn (gen_add3_insn (sp_reg_rtx, sp_reg_rtx,
17127 GEN_INT (info->total_size)));
45b194f8 17128 sp_offset = 0;
2b2c2fe5 17129 }
ff35822b
JJ
17130 if (insn && frame_reg_rtx == sp_reg_rtx)
17131 {
17132 if (cfa_restores)
17133 {
17134 REG_NOTES (insn) = cfa_restores;
17135 cfa_restores = NULL_RTX;
17136 }
17137 add_reg_note (insn, REG_CFA_DEF_CFA, sp_reg_rtx);
17138 RTX_FRAME_RELATED_P (insn) = 1;
17139 }
2b2c2fe5 17140
e6477eaa 17141 /* Restore AltiVec registers if we have not done so already. */
17142 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17143 && TARGET_ALTIVEC_ABI
17144 && info->altivec_size != 0
17145 && (DEFAULT_ABI == ABI_V4
ff35822b 17146 || !offset_below_red_zone_p (info->altivec_save_offset)))
17147 {
17148 int i;
17149
17150 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
17151 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
17152 {
ff35822b 17153 rtx addr, areg, mem, reg;
e6477eaa
AM
17154
17155 areg = gen_rtx_REG (Pmode, 0);
17156 emit_move_insn
17157 (areg, GEN_INT (info->altivec_save_offset
17158 + sp_offset
17159 + 16 * (i - info->first_altivec_reg_save)));
17160
17161 /* AltiVec addressing mode is [reg+reg]. */
17162 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
17163 mem = gen_frame_mem (V4SImode, addr);
17164
ff35822b
JJ
17165 reg = gen_rtx_REG (V4SImode, i);
17166 emit_move_insn (reg, mem);
17167 if (DEFAULT_ABI == ABI_V4)
17168 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17169 cfa_restores);
e6477eaa
AM
17170 }
17171 }
17172
17173 /* Restore VRSAVE if we have not done so already. */
17174 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17175 && TARGET_ALTIVEC
17176 && TARGET_ALTIVEC_VRSAVE
17177 && info->vrsave_mask != 0
17178 && (DEFAULT_ABI == ABI_V4
ff35822b 17179 || !offset_below_red_zone_p (info->vrsave_save_offset)))
17180 {
17181 rtx addr, mem, reg;
17182
17183 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17184 GEN_INT (info->vrsave_save_offset + sp_offset));
17185 mem = gen_frame_mem (SImode, addr);
17186 reg = gen_rtx_REG (SImode, 12);
17187 emit_move_insn (reg, mem);
17188
17189 emit_insn (generate_set_vrsave (reg, info, 1));
17190 }
17191
f78c3290
NF
17192 /* Get the old lr if we saved it. If we are restoring registers
17193 out-of-line, then the out-of-line routines can do this for us. */
17194 if (restore_lr)
b6c9286a 17195 {
a3170dc6
AH
17196 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
17197 info->lr_save_offset + sp_offset);
ba4828e0 17198
9ebbca7d 17199 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 17200 }
f676971a 17201
9ebbca7d
GK
17202 /* Get the old cr if we saved it. */
17203 if (info->cr_save_p)
17204 {
17205 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17206 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 17207 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 17208
9ebbca7d
GK
17209 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
17210 }
f676971a 17211
ff35822b
JJ
17212 /* Set LR here to try to overlap restores below. LR is always saved
17213 above incoming stack, so it never needs REG_CFA_RESTORE. */
f78c3290 17214 if (restore_lr)
1de43f85 17215 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 17216 gen_rtx_REG (Pmode, 0));
f676971a 17217
83720594 17218 /* Load exception handler data registers, if needed. */
e3b5732b 17219 if (crtl->calls_eh_return)
83720594 17220 {
78e1b90d
DE
17221 unsigned int i, regno;
17222
fc4767bb
JJ
17223 if (TARGET_AIX)
17224 {
17225 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17226 GEN_INT (sp_offset + 5 * reg_size));
0be76840 17227 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
17228
17229 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
17230 }
17231
83720594
RH
17232 for (i = 0; ; ++i)
17233 {
a3170dc6 17234 rtx mem;
83720594
RH
17235
17236 regno = EH_RETURN_DATA_REGNO (i);
17237 if (regno == INVALID_REGNUM)
17238 break;
17239
a3170dc6
AH
17240 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
17241 info->ehrd_offset + sp_offset
17242 + reg_size * (int) i);
83720594
RH
17243
17244 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
17245 }
17246 }
f676971a 17247
9ebbca7d
GK
17248 /* Restore GPRs. This is done as a PARALLEL if we are using
17249 the load-multiple instructions. */
17250 if (TARGET_SPE_ABI
17251 && info->spe_64bit_regs_used != 0
17252 && info->first_gp_reg_save != 32)
52ff33d0 17253 {
52ff33d0
NF
17254 /* Determine whether we can address all of the registers that need
17255 to be saved with an offset from the stack pointer that fits in
17256 the small const field for SPE memory instructions. */
17257 int spe_regs_addressable_via_sp
17258 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
17259 + (32 - info->first_gp_reg_save - 1) * reg_size)
17260 && restoring_GPRs_inline);
52ff33d0
NF
17261 int spe_offset;
17262
17263 if (spe_regs_addressable_via_sp)
45b194f8 17264 spe_offset = info->spe_gp_save_offset + sp_offset;
17265 else
17266 {
45b194f8 17267 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 17268 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 17269 not clobbering it when we were saving registers in the prologue.
17270 There's no need to worry here because the static chain is passed
17271 anew to every function. */
f78c3290
NF
17272 int ool_adjust = (restoring_GPRs_inline
17273 ? 0
17274 : (info->first_gp_reg_save
17275 - (FIRST_SAVRES_REGISTER+1))*8);
17276
45b194f8
AM
17277 if (frame_reg_rtx == sp_reg_rtx)
17278 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17279 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
17280 GEN_INT (info->spe_gp_save_offset
17281 + sp_offset
17282 - ool_adjust)));
45b194f8
AM
17283 /* Keep the invariant that frame_reg_rtx + sp_offset points
17284 at the top of the stack frame. */
17285 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
17286
17287 spe_offset = 0;
17288 }
17289
f78c3290
NF
17290 if (restoring_GPRs_inline)
17291 {
17292 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17293 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17294 {
ff35822b 17295 rtx offset, addr, mem, reg;
52ff33d0 17296
f78c3290
NF
17297 /* We're doing all this to ensure that the immediate offset
17298 fits into the immediate field of 'evldd'. */
17299 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
52ff33d0 17300
f78c3290
NF
17301 offset = GEN_INT (spe_offset + reg_size * i);
17302 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
17303 mem = gen_rtx_MEM (V2SImode, addr);
ff35822b 17304 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
52ff33d0 17305
ff35822b
JJ
17306 insn = emit_move_insn (reg, mem);
17307 if (DEFAULT_ABI == ABI_V4)
17308 {
17309 if (frame_pointer_needed
17310 && info->first_gp_reg_save + i
17311 == HARD_FRAME_POINTER_REGNUM)
17312 {
17313 add_reg_note (insn, REG_CFA_DEF_CFA,
17314 plus_constant (frame_reg_rtx,
17315 sp_offset));
17316 RTX_FRAME_RELATED_P (insn) = 1;
17317 }
17318
17319 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17320 cfa_restores);
17321 }
f78c3290
NF
17322 }
17323 }
17324 else
17325 {
17326 rtx par;
17327
17328 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
17329 0, reg_mode,
17330 /*savep=*/false, /*gpr=*/true,
17331 /*exitp=*/true);
17332 emit_jump_insn (par);
f78c3290
NF
17333 /* We don't want anybody else emitting things after we jumped
17334 back. */
17335 return;
17336 }
52ff33d0 17337 }
17338 else if (!restoring_GPRs_inline)
17339 {
17340 /* We are jumping to an out-of-line function. */
17341 bool can_use_exit = info->first_fp_reg_save == 64;
17342 rtx par;
17343
17344 /* Emit stack reset code if we need it. */
17345 if (can_use_exit)
17346 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17347 sp_offset, can_use_exit);
17348 else
17349 emit_insn (gen_addsi3 (gen_rtx_REG (Pmode, 11),
17350 sp_reg_rtx,
17351 GEN_INT (sp_offset - info->fp_size)));
17352
17353 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
17354 info->gp_save_offset, reg_mode,
17355 /*savep=*/false, /*gpr=*/true,
17356 /*exitp=*/can_use_exit);
17357
17358 if (can_use_exit)
17359 {
17360 if (info->cr_save_p)
17361 {
17362 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12),
17363 using_mtcr_multiple);
17364 if (DEFAULT_ABI == ABI_V4)
17365 cfa_restores
17366 = alloc_reg_note (REG_CFA_RESTORE,
17367 gen_rtx_REG (SImode, CR2_REGNO),
17368 cfa_restores);
17369 }
f78c3290
NF
17370
17371 emit_jump_insn (par);
17372
17373 /* We don't want anybody else emitting things after we jumped
17374 back. */
17375 return;
17376 }
ff35822b
JJ
17377
17378 insn = emit_insn (par);
17379 if (DEFAULT_ABI == ABI_V4)
17380 {
17381 if (frame_pointer_needed)
17382 {
17383 add_reg_note (insn, REG_CFA_DEF_CFA,
17384 plus_constant (frame_reg_rtx, sp_offset));
17385 RTX_FRAME_RELATED_P (insn) = 1;
17386 }
17387
17388 for (i = info->first_gp_reg_save; i < 32; i++)
17389 cfa_restores
17390 = alloc_reg_note (REG_CFA_RESTORE,
17391 gen_rtx_REG (reg_mode, i), cfa_restores);
17392 }
f78c3290
NF
17393 }
17394 else if (using_load_multiple)
17395 {
17396 rtvec p;
17397 p = rtvec_alloc (32 - info->first_gp_reg_save);
17398 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9ebbca7d 17399 {
17400 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17401 GEN_INT (info->gp_save_offset
17402 + sp_offset
9ebbca7d 17403 + reg_size * i));
0be76840 17404 rtx mem = gen_frame_mem (reg_mode, addr);
ff35822b 17405 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
ba4828e0 17406
ff35822b
JJ
17407 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, reg, mem);
17408 if (DEFAULT_ABI == ABI_V4)
17409 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17410 cfa_restores);
17411 }
17412 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
17413 if (DEFAULT_ABI == ABI_V4 && frame_pointer_needed)
17414 {
17415 add_reg_note (insn, REG_CFA_DEF_CFA,
17416 plus_constant (frame_reg_rtx, sp_offset));
17417 RTX_FRAME_RELATED_P (insn) = 1;
9ebbca7d 17418 }
f78c3290
NF
17419 }
17420 else
17421 {
17422 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17423 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17424 {
17425 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17426 GEN_INT (info->gp_save_offset
17427 + sp_offset
17428 + reg_size * i));
17429 rtx mem = gen_frame_mem (reg_mode, addr);
ff35822b 17430 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f78c3290 17431
ff35822b
JJ
17432 insn = emit_move_insn (reg, mem);
17433 if (DEFAULT_ABI == ABI_V4)
17434 {
17435 if (frame_pointer_needed
17436 && info->first_gp_reg_save + i
17437 == HARD_FRAME_POINTER_REGNUM)
17438 {
17439 add_reg_note (insn, REG_CFA_DEF_CFA,
17440 plus_constant (frame_reg_rtx, sp_offset));
17441 RTX_FRAME_RELATED_P (insn) = 1;
17442 }
17443
17444 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17445 cfa_restores);
17446 }
f78c3290
NF
17447 }
17448 }
9878760c 17449
9ebbca7d
GK
17450 /* Restore fpr's if we need to do it without calling a function. */
17451 if (restoring_FPRs_inline)
17452 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 17453 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
17454 && ! call_used_regs[info->first_fp_reg_save+i]))
17455 {
ff35822b 17456 rtx addr, mem, reg;
9ebbca7d 17457 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17458 GEN_INT (info->fp_save_offset
17459 + sp_offset
a4f6c312 17460 + 8 * i));
696e45ba
ME
17461 mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
17462 ? DFmode : SFmode), addr);
ff35822b
JJ
17463 reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
17464 ? DFmode : SFmode),
17465 info->first_fp_reg_save + i);
9ebbca7d 17466
ff35822b
JJ
17467 emit_move_insn (reg, mem);
17468 if (DEFAULT_ABI == ABI_V4)
17469 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17470 cfa_restores);
9ebbca7d 17471 }
8d30c4ee 17472
9ebbca7d
GK
17473 /* If we saved cr, restore it here. Just those that were used. */
17474 if (info->cr_save_p)
17475 {
17476 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12), using_mtcr_multiple);
17477 if (DEFAULT_ABI == ABI_V4)
17478 cfa_restores
17479 = alloc_reg_note (REG_CFA_RESTORE, gen_rtx_REG (SImode, CR2_REGNO),
17480 cfa_restores);
17481 }
979721f8 17482
9ebbca7d 17483 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6 17484 have been done. */
ff35822b
JJ
17485 insn = rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17486 sp_offset, !restoring_FPRs_inline);
17487 if (insn)
17488 {
17489 if (cfa_restores)
17490 {
17491 REG_NOTES (insn) = cfa_restores;
17492 cfa_restores = NULL_RTX;
17493 }
17494 add_reg_note (insn, REG_CFA_DEF_CFA, sp_reg_rtx);
17495 RTX_FRAME_RELATED_P (insn) = 1;
17496 }
b6c9286a 17497
e3b5732b 17498 if (crtl->calls_eh_return)
17499 {
17500 rtx sa = EH_RETURN_STACKADJ_RTX;
ff35822b 17501 emit_insn (gen_add3_insn (sp_reg_rtx, sp_reg_rtx, sa));
83720594
RH
17502 }
17503
9ebbca7d
GK
17504 if (!sibcall)
17505 {
17506 rtvec p;
17507 if (! restoring_FPRs_inline)
f78c3290 17508 p = rtvec_alloc (4 + 64 - info->first_fp_reg_save);
17509 else
17510 p = rtvec_alloc (2);
b6c9286a 17511
e35b9579 17512 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f78c3290
NF
17513 RTVEC_ELT (p, 1) = (restoring_FPRs_inline
17514 ? gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 65))
17515 : gen_rtx_CLOBBER (VOIDmode,
17516 gen_rtx_REG (Pmode, 65)));
9ebbca7d
GK
17517
17518 /* If we have to restore more than two FP registers, branch to the
17519 restore function. It will return to our caller. */
17520 if (! restoring_FPRs_inline)
17521 {
17522 int i;
f78c3290
NF
17523 rtx sym;
17524
17525 sym = rs6000_savres_routine_sym (info,
17526 /*savep=*/false,
17527 /*gpr=*/false,
17528 /*exitp=*/true);
17529 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode, sym);
17530 RTVEC_ELT (p, 3) = gen_rtx_USE (VOIDmode,
17531 gen_rtx_REG (Pmode, 11));
9ebbca7d
GK
17532 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
17533 {
17534 rtx addr, mem;
17535 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
17536 GEN_INT (info->fp_save_offset + 8*i));
0be76840 17537 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 17538
f78c3290 17539 RTVEC_ELT (p, i+4) =
17540 gen_rtx_SET (VOIDmode,
17541 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
17542 mem);
b6c9286a
MM
17543 }
17544 }
f676971a 17545
9ebbca7d 17546 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 17547 }
9878760c
RK
17548}
17549
17550/* Write function epilogue. */
17551
08c148a8 17552static void
f676971a 17553rs6000_output_function_epilogue (FILE *file,
a2369ed3 17554 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 17555{
9ebbca7d 17556 if (! HAVE_epilogue)
9878760c 17557 {
9ebbca7d
GK
17558 rtx insn = get_last_insn ();
17559 /* If the last insn was a BARRIER, we don't have to write anything except
17560 the trace table. */
17561 if (GET_CODE (insn) == NOTE)
17562 insn = prev_nonnote_insn (insn);
17563 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 17564 {
9ebbca7d
GK
17565 /* This is slightly ugly, but at least we don't have two
17566 copies of the epilogue-emitting code. */
17567 start_sequence ();
17568
17569 /* A NOTE_INSN_DELETED is supposed to be at the start
17570 and end of the "toplevel" insn chain. */
2e040219 17571 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17572 rs6000_emit_epilogue (FALSE);
2e040219 17573 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17574
a3c9585f 17575 /* Expand INSN_ADDRESSES so final() doesn't crash. */
17576 {
17577 rtx insn;
17578 unsigned addr = 0;
17579 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
17580 {
17581 INSN_ADDRESSES_NEW (insn, addr);
17582 addr += 4;
17583 }
17584 }
17585
9ebbca7d 17586 if (TARGET_DEBUG_STACK)
a4f6c312 17587 debug_rtx_list (get_insns (), 100);
c9d691e9 17588 final (get_insns (), file, FALSE);
9ebbca7d 17589 end_sequence ();
4697a36c 17590 }
9878760c 17591 }
b4ac57ab 17592
efdba735
SH
17593#if TARGET_MACHO
17594 macho_branch_islands ();
0e5da0be
GK
17595 /* Mach-O doesn't support labels at the end of objects, so if
17596 it looks like we might want one, insert a NOP. */
17597 {
17598 rtx insn = get_last_insn ();
17599 while (insn
17600 && NOTE_P (insn)
a38e7aa5 17601 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 17602 insn = PREV_INSN (insn);
f676971a
EC
17603 if (insn
17604 && (LABEL_P (insn)
0e5da0be 17605 || (NOTE_P (insn)
a38e7aa5 17606 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
17607 fputs ("\tnop\n", file);
17608 }
17609#endif
17610
9b30bae2 17611 /* Output a traceback table here. See /usr/include/sys/debug.h for info
17612 on its format.
17613
17614 We don't output a traceback table if -finhibit-size-directive was
17615 used. The documentation for -finhibit-size-directive reads
17616 ``don't output a @code{.size} assembler directive, or anything
17617 else that would cause trouble if the function is split in the
17618 middle, and the two halves are placed at locations far apart in
17619 memory.'' The traceback table has this property, since it
17620 includes the offset from the start of the function to the
17621 traceback table itself.
17622
 17623 System V.4 PowerPC (and the embedded ABI derived from it) uses a
b6c9286a 17624 different traceback table. */
57ac7be9 17625 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
3c072c6b 17626 && rs6000_traceback != traceback_none && !cfun->is_thunk)
9b30bae2 17627 {
69c75916 17628 const char *fname = NULL;
3ac88239 17629 const char *language_string = lang_hooks.name;
6041bf2f 17630 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 17631 int i;
57ac7be9 17632 int optional_tbtab;
8097c268 17633 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
17634
17635 if (rs6000_traceback == traceback_full)
17636 optional_tbtab = 1;
17637 else if (rs6000_traceback == traceback_part)
17638 optional_tbtab = 0;
17639 else
17640 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 17641
69c75916
AM
17642 if (optional_tbtab)
17643 {
17644 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
17645 while (*fname == '.') /* V.4 encodes . in the name */
17646 fname++;
17647
17648 /* Need label immediately before tbtab, so we can compute
17649 its offset from the function start. */
17650 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
17651 ASM_OUTPUT_LABEL (file, fname);
17652 }
314fc5a9
ILT
17653
17654 /* The .tbtab pseudo-op can only be used for the first eight
17655 expressions, since it can't handle the possibly variable
17656 length fields that follow. However, if you omit the optional
17657 fields, the assembler outputs zeros for all optional fields
 17658 anyway, giving each variable length field its minimum length
 17659 (as defined in sys/debug.h). Thus we cannot use the .tbtab
17660 pseudo-op at all. */
17661
17662 /* An all-zero word flags the start of the tbtab, for debuggers
17663 that have to find it by searching forward from the entry
17664 point or from the current pc. */
19d2d16f 17665 fputs ("\t.long 0\n", file);
314fc5a9
ILT
17666
17667 /* Tbtab format type. Use format type 0. */
19d2d16f 17668 fputs ("\t.byte 0,", file);
314fc5a9 17669
5fc921c1
DE
17670 /* Language type. Unfortunately, there does not seem to be any
17671 official way to discover the language being compiled, so we
17672 use language_string.
17673 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
17674 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
17675 a number, so for now use 9. */
5fc921c1 17676 if (! strcmp (language_string, "GNU C"))
314fc5a9 17677 i = 0;
6de9cd9a 17678 else if (! strcmp (language_string, "GNU F77")
7f62878c 17679 || ! strcmp (language_string, "GNU Fortran"))
314fc5a9 17680 i = 1;
8b83775b 17681 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 17682 i = 2;
17683 else if (! strcmp (language_string, "GNU Ada"))
17684 i = 3;
17685 else if (! strcmp (language_string, "GNU C++")
17686 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 17687 i = 9;
17688 else if (! strcmp (language_string, "GNU Java"))
17689 i = 13;
17690 else if (! strcmp (language_string, "GNU Objective-C"))
17691 i = 14;
314fc5a9 17692 else
37409796 17693 gcc_unreachable ();
314fc5a9
ILT
17694 fprintf (file, "%d,", i);
17695
17696 /* 8 single bit fields: global linkage (not set for C extern linkage,
17697 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
17698 from start of procedure stored in tbtab, internal function, function
17699 has controlled storage, function has no toc, function uses fp,
17700 function logs/aborts fp operations. */
17701 /* Assume that fp operations are used if any fp reg must be saved. */
17702 fprintf (file, "%d,",
17703 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
17704
17705 /* 6 bitfields: function is interrupt handler, name present in
17706 proc table, function calls alloca, on condition directives
17707 (controls stack walks, 3 bits), saves condition reg, saves
17708 link reg. */
17709 /* The `function calls alloca' bit seems to be set whenever reg 31 is
17710 set up as a frame pointer, even when there is no alloca call. */
17711 fprintf (file, "%d,",
17712 ((optional_tbtab << 6)
17713 | ((optional_tbtab & frame_pointer_needed) << 5)
17714 | (info->cr_save_p << 1)
17715 | (info->lr_save_p)));
314fc5a9 17716
6041bf2f 17717 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
17718 (6 bits). */
17719 fprintf (file, "%d,",
4697a36c 17720 (info->push_p << 7) | (64 - info->first_fp_reg_save));
17721
17722 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
17723 fprintf (file, "%d,", (32 - first_reg_to_save ()));
17724
6041bf2f
DE
17725 if (optional_tbtab)
17726 {
17727 /* Compute the parameter info from the function decl argument
17728 list. */
17729 tree decl;
17730 int next_parm_info_bit = 31;
314fc5a9 17731
6041bf2f
DE
17732 for (decl = DECL_ARGUMENTS (current_function_decl);
17733 decl; decl = TREE_CHAIN (decl))
17734 {
17735 rtx parameter = DECL_INCOMING_RTL (decl);
17736 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 17737
6041bf2f
DE
17738 if (GET_CODE (parameter) == REG)
17739 {
ebb109ad 17740 if (SCALAR_FLOAT_MODE_P (mode))
17741 {
17742 int bits;
17743
17744 float_parms++;
17745
37409796
NS
17746 switch (mode)
17747 {
17748 case SFmode:
e41b2a33 17749 case SDmode:
17750 bits = 0x2;
17751 break;
17752
17753 case DFmode:
7393f7f8 17754 case DDmode:
37409796 17755 case TFmode:
7393f7f8 17756 case TDmode:
17757 bits = 0x3;
17758 break;
17759
17760 default:
17761 gcc_unreachable ();
17762 }
6041bf2f
DE
17763
17764 /* If only one bit will fit, don't or in this entry. */
17765 if (next_parm_info_bit > 0)
17766 parm_info |= (bits << (next_parm_info_bit - 1));
17767 next_parm_info_bit -= 2;
17768 }
17769 else
17770 {
17771 fixed_parms += ((GET_MODE_SIZE (mode)
17772 + (UNITS_PER_WORD - 1))
17773 / UNITS_PER_WORD);
17774 next_parm_info_bit -= 1;
17775 }
17776 }
17777 }
17778 }
314fc5a9
ILT
17779
17780 /* Number of fixed point parameters. */
17781 /* This is actually the number of words of fixed point parameters; thus
17782 an 8 byte struct counts as 2; and thus the maximum value is 8. */
17783 fprintf (file, "%d,", fixed_parms);
17784
17785 /* 2 bitfields: number of floating point parameters (7 bits), parameters
17786 all on stack. */
17787 /* This is actually the number of fp registers that hold parameters;
17788 and thus the maximum value is 13. */
17789 /* Set parameters on stack bit if parameters are not in their original
17790 registers, regardless of whether they are on the stack? Xlc
17791 seems to set the bit when not optimizing. */
17792 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
17793
6041bf2f
DE
17794 if (! optional_tbtab)
17795 return;
17796
314fc5a9
ILT
17797 /* Optional fields follow. Some are variable length. */
17798
17799 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
17800 11 double float. */
17801 /* There is an entry for each parameter in a register, in the order that
17802 they occur in the parameter list. Any intervening arguments on the
17803 stack are ignored. If the list overflows a long (max possible length
17804 34 bits) then completely leave off all elements that don't fit. */
17805 /* Only emit this long if there was at least one parameter. */
17806 if (fixed_parms || float_parms)
17807 fprintf (file, "\t.long %d\n", parm_info);
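      /* Added worked example (editorial, not in the original source): for a
	 function whose register parameters are (int, double, float), the int
	 leaves bit 31 as 0, the double places 11 in bits 30-29 and the float
	 places 10 in bits 28-27, so parm_info is 0x70000000 and the .long
	 above is emitted as 1879048192.  */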
17808
17809 /* Offset from start of code to tb table. */
19d2d16f 17810 fputs ("\t.long ", file);
314fc5a9 17811 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
17812 if (TARGET_AIX)
17813 RS6000_OUTPUT_BASENAME (file, fname);
17814 else
17815 assemble_name (file, fname);
17816 putc ('-', file);
17817 rs6000_output_function_entry (file, fname);
19d2d16f 17818 putc ('\n', file);
314fc5a9
ILT
17819
17820 /* Interrupt handler mask. */
17821 /* Omit this long, since we never set the interrupt handler bit
17822 above. */
17823
17824 /* Number of CTL (controlled storage) anchors. */
17825 /* Omit this long, since the has_ctl bit is never set above. */
17826
17827 /* Displacement into stack of each CTL anchor. */
17828 /* Omit this list of longs, because there are no CTL anchors. */
17829
17830 /* Length of function name. */
17831 if (*fname == '*')
17832 ++fname;
296b8152 17833 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
17834
17835 /* Function name. */
17836 assemble_string (fname, strlen (fname));
17837
17838 /* Register for alloca automatic storage; this is always reg 31.
17839 Only emit this if the alloca bit was set above. */
17840 if (frame_pointer_needed)
19d2d16f 17841 fputs ("\t.byte 31\n", file);
b1765bde
DE
17842
17843 fputs ("\t.align 2\n", file);
9b30bae2 17844 }
9878760c 17845}
17167fd8 17846\f
a4f6c312
SS
17847/* A C compound statement that outputs the assembler code for a thunk
17848 function, used to implement C++ virtual function calls with
17849 multiple inheritance. The thunk acts as a wrapper around a virtual
17850 function, adjusting the implicit object parameter before handing
17851 control off to the real function.
17852
17853 First, emit code to add the integer DELTA to the location that
17854 contains the incoming first argument. Assume that this argument
17855 contains a pointer, and is the one used to pass the `this' pointer
17856 in C++. This is the incoming argument *before* the function
17857 prologue, e.g. `%o0' on a sparc. The addition must preserve the
17858 values of all other incoming arguments.
17167fd8
MM
17859
17860 After the addition, emit code to jump to FUNCTION, which is a
17861 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
17862 not touch the return address. Hence returning from FUNCTION will
17863 return to whoever called the current `thunk'.
17167fd8 17864
17865 The effect must be as if FUNCTION had been called directly with the
17866 adjusted first argument. This macro is responsible for emitting
17867 all of the code for a thunk function; output_function_prologue()
17868 and output_function_epilogue() are not invoked.
17167fd8 17869
17870 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
17871 been extracted from it.) It might possibly be useful on some
17872 targets, but probably not.
17167fd8 17873
17874 If you do not define this macro, the target-independent code in the
17875 C++ frontend will generate a less efficient heavyweight thunk that
17876 calls FUNCTION instead of jumping to it. The generic approach does
17877 not support varargs. */
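/* A concrete illustration (added commentary, not in the original source):
   with DELTA = 8 and VCALL_OFFSET = 0, the code below simply adds 8 to the
   incoming `this' pointer (r3, or r4 when the function returns a structure
   in memory) and tail-jumps to FUNCTION; when VCALL_OFFSET is non-zero it
   also loads the extra adjustment stored VCALL_OFFSET bytes into the vtable
   that `this' points to and adds that as well.  */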
17167fd8 17878
3961e8fe 17879static void
17880rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
17881 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 17882 tree function)
17167fd8 17883{
0a2aaacc 17884 rtx this_rtx, insn, funexp;
17167fd8 17885
5b71a4e7 17886 reload_completed = 1;
fe3ad572 17887 epilogue_completed = 1;
56a7189a 17888
5b71a4e7 17889 /* Mark the end of the (empty) prologue. */
2e040219 17890 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 17891
5b71a4e7
DE
17892 /* Find the "this" pointer. If the function returns a structure,
17893 the structure return pointer is in r3. */
61f71b34 17894 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
0a2aaacc 17895 this_rtx = gen_rtx_REG (Pmode, 4);
56a7189a 17896 else
0a2aaacc 17897 this_rtx = gen_rtx_REG (Pmode, 3);
17167fd8 17898
5b71a4e7
DE
17899 /* Apply the constant offset, if required. */
17900 if (delta)
17901 {
17902 rtx delta_rtx = GEN_INT (delta);
17903 emit_insn (TARGET_32BIT
0a2aaacc
KG
17904 ? gen_addsi3 (this_rtx, this_rtx, delta_rtx)
17905 : gen_adddi3 (this_rtx, this_rtx, delta_rtx));
17167fd8
MM
17906 }
17907
5b71a4e7
DE
17908 /* Apply the offset from the vtable, if required. */
17909 if (vcall_offset)
17167fd8 17910 {
5b71a4e7
DE
17911 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
17912 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 17913
0a2aaacc 17914 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
eeff9307
JJ
17915 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
17916 {
17917 emit_insn (TARGET_32BIT
17918 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
17919 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
17920 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
17921 }
17922 else
17923 {
17924 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
17925
17926 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
17927 }
5b71a4e7 17928 emit_insn (TARGET_32BIT
0a2aaacc
KG
17929 ? gen_addsi3 (this_rtx, this_rtx, tmp)
17930 : gen_adddi3 (this_rtx, this_rtx, tmp));
17167fd8
MM
17931 }
17932
5b71a4e7
DE
17933 /* Generate a tail call to the target function. */
17934 if (!TREE_USED (function))
17935 {
17936 assemble_external (function);
17937 TREE_USED (function) = 1;
17938 }
17939 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 17940 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
17941
17942#if TARGET_MACHO
ab82a49f 17943 if (MACHOPIC_INDIRECT)
5b71a4e7 17944 funexp = machopic_indirect_call_target (funexp);
ee890fe2 17945#endif
5b71a4e7
DE
17946
 17947 /* gen_sibcall expects reload to convert the scratch pseudo to LR, so we
 992d08b1 17948 must generate the sibcall RTL explicitly. */
5b71a4e7
DE
17949 insn = emit_call_insn (
17950 gen_rtx_PARALLEL (VOIDmode,
17951 gen_rtvec (4,
17952 gen_rtx_CALL (VOIDmode,
17953 funexp, const0_rtx),
17954 gen_rtx_USE (VOIDmode, const0_rtx),
17955 gen_rtx_USE (VOIDmode,
17956 gen_rtx_REG (SImode,
1de43f85 17957 LR_REGNO)),
5b71a4e7
DE
17958 gen_rtx_RETURN (VOIDmode))));
17959 SIBLING_CALL_P (insn) = 1;
17960 emit_barrier ();
17961
17962 /* Run just enough of rest_of_compilation to get the insns emitted.
17963 There's not really enough bulk here to make other passes such as
 17964 instruction scheduling worthwhile. Note that use_thunk calls
17965 assemble_start_function and assemble_end_function. */
17966 insn = get_insns ();
55e092c4 17967 insn_locators_alloc ();
5b71a4e7
DE
17968 shorten_branches (insn);
17969 final_start_function (insn, file, 1);
c9d691e9 17970 final (insn, file, 1);
5b71a4e7 17971 final_end_function ();
d7087dd2 17972 free_after_compilation (cfun);
5b71a4e7
DE
17973
17974 reload_completed = 0;
fe3ad572 17975 epilogue_completed = 0;
9ebbca7d 17976}
9ebbca7d
GK
17977\f
17978/* A quick summary of the various types of 'constant-pool tables'
17979 under PowerPC:
17980
f676971a 17981 Target Flags Name One table per
9ebbca7d
GK
17982 AIX (none) AIX TOC object file
17983 AIX -mfull-toc AIX TOC object file
17984 AIX -mminimal-toc AIX minimal TOC translation unit
17985 SVR4/EABI (none) SVR4 SDATA object file
17986 SVR4/EABI -fpic SVR4 pic object file
17987 SVR4/EABI -fPIC SVR4 PIC translation unit
17988 SVR4/EABI -mrelocatable EABI TOC function
17989 SVR4/EABI -maix AIX TOC object file
f676971a 17990 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
17991 AIX minimal TOC translation unit
17992
17993 Name Reg. Set by entries contains:
17994 made by addrs? fp? sum?
17995
17996 AIX TOC 2 crt0 as Y option option
17997 AIX minimal TOC 30 prolog gcc Y Y option
17998 SVR4 SDATA 13 crt0 gcc N Y N
17999 SVR4 pic 30 prolog ld Y not yet N
18000 SVR4 PIC 30 prolog gcc Y option option
18001 EABI TOC 30 prolog gcc Y option option
18002
18003*/
18004
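/* Illustration only (exact spelling varies with target flags and the
   target's DOUBLE_INT_ASM_OP): a full AIX-style TOC produced by output_toc
   below typically contains entries such as

	.tc FD_3ff00000_0[TC],0x3ff00000,0x0	# the double 1.0, 32-bit
	.tc foo[TC],foo				# the address of symbol foo

   while -mminimal-toc instead drops bare .long/.llong words into a small
   table addressed from register 30, as summarized in the table above.  */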
9ebbca7d
GK
18005/* Hash functions for the hash table. */
18006
18007static unsigned
a2369ed3 18008rs6000_hash_constant (rtx k)
9ebbca7d 18009{
46b33600
RH
18010 enum rtx_code code = GET_CODE (k);
18011 enum machine_mode mode = GET_MODE (k);
18012 unsigned result = (code << 3) ^ mode;
18013 const char *format;
18014 int flen, fidx;
f676971a 18015
46b33600
RH
18016 format = GET_RTX_FORMAT (code);
18017 flen = strlen (format);
18018 fidx = 0;
9ebbca7d 18019
46b33600
RH
18020 switch (code)
18021 {
18022 case LABEL_REF:
18023 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
18024
18025 case CONST_DOUBLE:
18026 if (mode != VOIDmode)
18027 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
18028 flen = 2;
18029 break;
18030
18031 case CODE_LABEL:
18032 fidx = 3;
18033 break;
18034
18035 default:
18036 break;
18037 }
9ebbca7d
GK
18038
18039 for (; fidx < flen; fidx++)
18040 switch (format[fidx])
18041 {
18042 case 's':
18043 {
18044 unsigned i, len;
18045 const char *str = XSTR (k, fidx);
18046 len = strlen (str);
18047 result = result * 613 + len;
18048 for (i = 0; i < len; i++)
18049 result = result * 613 + (unsigned) str[i];
17167fd8
MM
18050 break;
18051 }
9ebbca7d
GK
18052 case 'u':
18053 case 'e':
18054 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
18055 break;
18056 case 'i':
18057 case 'n':
18058 result = result * 613 + (unsigned) XINT (k, fidx);
18059 break;
18060 case 'w':
18061 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
18062 result = result * 613 + (unsigned) XWINT (k, fidx);
18063 else
18064 {
18065 size_t i;
9390387d 18066 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
18067 result = result * 613 + (unsigned) (XWINT (k, fidx)
18068 >> CHAR_BIT * i);
18069 }
18070 break;
09501938
DE
18071 case '0':
18072 break;
9ebbca7d 18073 default:
37409796 18074 gcc_unreachable ();
9ebbca7d 18075 }
46b33600 18076
9ebbca7d
GK
18077 return result;
18078}
18079
18080static unsigned
a2369ed3 18081toc_hash_function (const void *hash_entry)
9ebbca7d 18082{
f676971a 18083 const struct toc_hash_struct *thc =
a9098fd0
GK
18084 (const struct toc_hash_struct *) hash_entry;
18085 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
18086}
18087
18088/* Compare H1 and H2 for equivalence. */
18089
18090static int
a2369ed3 18091toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
18092{
18093 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
18094 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
18095
a9098fd0
GK
18096 if (((const struct toc_hash_struct *) h1)->key_mode
18097 != ((const struct toc_hash_struct *) h2)->key_mode)
18098 return 0;
18099
5692c7bc 18100 return rtx_equal_p (r1, r2);
9ebbca7d
GK
18101}
18102
28e510bd
MM
18103/* These are the names given by the C++ front-end to vtables, and
18104 vtable-like objects. Ideally, this logic should not be here;
18105 instead, there should be some programmatic way of inquiring as
18106 to whether or not an object is a vtable. */
18107
18108#define VTABLE_NAME_P(NAME) \
9390387d 18109 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
18110 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
18111 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 18112 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 18113 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
28e510bd 18114
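/* For example (Itanium C++ ABI mangling, shown only for illustration):
   "_ZTV4Base" is the vtable for class Base, "_ZTI4Base" its type_info,
   "_ZTT4Base" its VTT, "_ZTC..." a construction vtable, and "_vt." names
   come from the old GCC 2.x mangling; all of them satisfy VTABLE_NAME_P.  */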
ee06c6a5
DE
18115#ifdef NO_DOLLAR_IN_LABEL
18116/* Return a GGC-allocated character string translating dollar signs in
18117 input NAME to underscores. Used by XCOFF ASM_OUTPUT_LABELREF. */
18118
18119const char *
18120rs6000_xcoff_strip_dollar (const char *name)
18121{
18122 char *strip, *p;
18123 int len;
18124
18125 p = strchr (name, '$');
18126
18127 if (p == 0 || p == name)
18128 return name;
18129
18130 len = strlen (name);
18131 strip = (char *) alloca (len + 1);
18132 strcpy (strip, name);
18133 p = strchr (strip, '$');
18134 while (p)
18135 {
18136 *p = '_';
18137 p = strchr (p + 1, '$');
18138 }
18139
18140 return ggc_alloc_string (strip, len);
18141}
18142#endif
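
/* Usage sketch: rs6000_xcoff_strip_dollar ("foo$bar$baz") yields
   "foo_bar_baz", while a name containing no '$', or one that starts
   with '$', is returned unchanged.  */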
18143
28e510bd 18144void
a2369ed3 18145rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
18146{
18147 /* Currently C++ toc references to vtables can be emitted before it
18148 is decided whether the vtable is public or private. If this is
18149 the case, then the linker will eventually complain that there is
f676971a 18150 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
18151 we emit the TOC reference to reference the symbol and not the
18152 section. */
18153 const char *name = XSTR (x, 0);
54ee9799 18154
f676971a 18155 if (VTABLE_NAME_P (name))
54ee9799
DE
18156 {
18157 RS6000_OUTPUT_BASENAME (file, name);
18158 }
18159 else
18160 assemble_name (file, name);
28e510bd
MM
18161}
18162
a4f6c312
SS
18163/* Output a TOC entry. We derive the entry name from what is being
18164 written. */
9878760c
RK
18165
18166void
a2369ed3 18167output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
18168{
18169 char buf[256];
3cce094d 18170 const char *name = buf;
9878760c 18171 rtx base = x;
16fdeb48 18172 HOST_WIDE_INT offset = 0;
9878760c 18173
37409796 18174 gcc_assert (!TARGET_NO_TOC);
4697a36c 18175
9ebbca7d
GK
18176 /* When the linker won't eliminate them, don't output duplicate
18177 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
18178 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
18179 CODE_LABELs. */
18180 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
18181 {
18182 struct toc_hash_struct *h;
18183 void * * found;
f676971a 18184
17211ab5 18185 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 18186 time because GGC is not initialized at that point. */
17211ab5 18187 if (toc_hash_table == NULL)
f676971a 18188 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
18189 toc_hash_eq, NULL);
18190
5ead67f6 18191 h = GGC_NEW (struct toc_hash_struct);
9ebbca7d 18192 h->key = x;
a9098fd0 18193 h->key_mode = mode;
9ebbca7d 18194 h->labelno = labelno;
f676971a 18195
bbbbb16a 18196 found = htab_find_slot (toc_hash_table, h, INSERT);
9ebbca7d
GK
18197 if (*found == NULL)
18198 *found = h;
f676971a 18199 else /* This is indeed a duplicate.
9ebbca7d
GK
18200 Set this label equal to that label. */
18201 {
18202 fputs ("\t.set ", file);
18203 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
18204 fprintf (file, "%d,", labelno);
18205 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 18206 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
18207 found)->labelno));
18208 return;
18209 }
18210 }
18211
18212 /* If we're going to put a double constant in the TOC, make sure it's
18213 aligned properly when strict alignment is on. */
ff1720ed
RK
18214 if (GET_CODE (x) == CONST_DOUBLE
18215 && STRICT_ALIGNMENT
a9098fd0 18216 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
18217 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
18218 ASM_OUTPUT_ALIGN (file, 3);
18219 }
18220
4977bab6 18221 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 18222
37c37a57
RK
18223 /* Handle FP constants specially. Note that if we have a minimal
18224 TOC, things we put here aren't actually in the TOC, so we can allow
18225 FP constants. */
00b79d54
BE
18226 if (GET_CODE (x) == CONST_DOUBLE &&
18227 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
18228 {
18229 REAL_VALUE_TYPE rv;
18230 long k[4];
18231
18232 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18233 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18234 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
18235 else
18236 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
18237
18238 if (TARGET_64BIT)
18239 {
18240 if (TARGET_MINIMAL_TOC)
18241 fputs (DOUBLE_INT_ASM_OP, file);
18242 else
18243 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
18244 k[0] & 0xffffffff, k[1] & 0xffffffff,
18245 k[2] & 0xffffffff, k[3] & 0xffffffff);
18246 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
18247 k[0] & 0xffffffff, k[1] & 0xffffffff,
18248 k[2] & 0xffffffff, k[3] & 0xffffffff);
18249 return;
18250 }
18251 else
18252 {
18253 if (TARGET_MINIMAL_TOC)
18254 fputs ("\t.long ", file);
18255 else
18256 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
18257 k[0] & 0xffffffff, k[1] & 0xffffffff,
18258 k[2] & 0xffffffff, k[3] & 0xffffffff);
18259 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
18260 k[0] & 0xffffffff, k[1] & 0xffffffff,
18261 k[2] & 0xffffffff, k[3] & 0xffffffff);
18262 return;
18263 }
18264 }
00b79d54
BE
18265 else if (GET_CODE (x) == CONST_DOUBLE &&
18266 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 18267 {
042259f2
DE
18268 REAL_VALUE_TYPE rv;
18269 long k[2];
0adc764e 18270
042259f2 18271 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18272
18273 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18274 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
18275 else
18276 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 18277
13ded975
DE
18278 if (TARGET_64BIT)
18279 {
18280 if (TARGET_MINIMAL_TOC)
2bfcf297 18281 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 18282 else
2f0552b6
AM
18283 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
18284 k[0] & 0xffffffff, k[1] & 0xffffffff);
18285 fprintf (file, "0x%lx%08lx\n",
18286 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
18287 return;
18288 }
1875cc88 18289 else
13ded975
DE
18290 {
18291 if (TARGET_MINIMAL_TOC)
2bfcf297 18292 fputs ("\t.long ", file);
13ded975 18293 else
2f0552b6
AM
18294 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
18295 k[0] & 0xffffffff, k[1] & 0xffffffff);
18296 fprintf (file, "0x%lx,0x%lx\n",
18297 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
18298 return;
18299 }
9878760c 18300 }
00b79d54
BE
18301 else if (GET_CODE (x) == CONST_DOUBLE &&
18302 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 18303 {
042259f2
DE
18304 REAL_VALUE_TYPE rv;
18305 long l;
9878760c 18306
042259f2 18307 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18308 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18309 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
18310 else
18311 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 18312
31bfaa0b
DE
18313 if (TARGET_64BIT)
18314 {
18315 if (TARGET_MINIMAL_TOC)
2bfcf297 18316 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 18317 else
2f0552b6
AM
18318 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
18319 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
18320 return;
18321 }
042259f2 18322 else
31bfaa0b
DE
18323 {
18324 if (TARGET_MINIMAL_TOC)
2bfcf297 18325 fputs ("\t.long ", file);
31bfaa0b 18326 else
2f0552b6
AM
18327 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
18328 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
18329 return;
18330 }
042259f2 18331 }
f176e826 18332 else if (GET_MODE (x) == VOIDmode
a9098fd0 18333 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 18334 {
e2c953b6 18335 unsigned HOST_WIDE_INT low;
042259f2
DE
18336 HOST_WIDE_INT high;
18337
18338 if (GET_CODE (x) == CONST_DOUBLE)
18339 {
18340 low = CONST_DOUBLE_LOW (x);
18341 high = CONST_DOUBLE_HIGH (x);
18342 }
18343 else
18344#if HOST_BITS_PER_WIDE_INT == 32
18345 {
18346 low = INTVAL (x);
0858c623 18347 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
18348 }
18349#else
18350 {
c4ad648e
AM
18351 low = INTVAL (x) & 0xffffffff;
18352 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
18353 }
18354#endif
9878760c 18355
a9098fd0
GK
 18356 /* TOC entries are always Pmode-sized, but since this
 18357 is a big-endian machine, if we're putting smaller
 18358 integer constants in the TOC we have to pad them.
18359 (This is still a win over putting the constants in
18360 a separate constant pool, because then we'd have
02a4ec28
FS
18361 to have both a TOC entry _and_ the actual constant.)
18362
18363 For a 32-bit target, CONST_INT values are loaded and shifted
18364 entirely within `low' and can be stored in one TOC entry. */
18365
37409796
NS
18366 /* It would be easy to make this work, but it doesn't now. */
18367 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
18368
18369 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
18370 {
18371#if HOST_BITS_PER_WIDE_INT == 32
18372 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
18373 POINTER_SIZE, &low, &high, 0);
18374#else
18375 low |= high << 32;
18376 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
18377 high = (HOST_WIDE_INT) low >> 32;
18378 low &= 0xffffffff;
18379#endif
18380 }
a9098fd0 18381
13ded975
DE
18382 if (TARGET_64BIT)
18383 {
18384 if (TARGET_MINIMAL_TOC)
2bfcf297 18385 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 18386 else
2f0552b6
AM
18387 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
18388 (long) high & 0xffffffff, (long) low & 0xffffffff);
18389 fprintf (file, "0x%lx%08lx\n",
18390 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
18391 return;
18392 }
1875cc88 18393 else
13ded975 18394 {
02a4ec28
FS
18395 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
18396 {
18397 if (TARGET_MINIMAL_TOC)
2bfcf297 18398 fputs ("\t.long ", file);
02a4ec28 18399 else
2bfcf297 18400 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
18401 (long) high & 0xffffffff, (long) low & 0xffffffff);
18402 fprintf (file, "0x%lx,0x%lx\n",
18403 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 18404 }
13ded975 18405 else
02a4ec28
FS
18406 {
18407 if (TARGET_MINIMAL_TOC)
2bfcf297 18408 fputs ("\t.long ", file);
02a4ec28 18409 else
2f0552b6
AM
18410 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
18411 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 18412 }
13ded975
DE
18413 return;
18414 }
9878760c
RK
18415 }
18416
18417 if (GET_CODE (x) == CONST)
18418 {
37409796 18419 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 18420
9878760c
RK
18421 base = XEXP (XEXP (x, 0), 0);
18422 offset = INTVAL (XEXP (XEXP (x, 0), 1));
18423 }
f676971a 18424
37409796
NS
18425 switch (GET_CODE (base))
18426 {
18427 case SYMBOL_REF:
18428 name = XSTR (base, 0);
18429 break;
18430
18431 case LABEL_REF:
18432 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
18433 CODE_LABEL_NUMBER (XEXP (base, 0)));
18434 break;
18435
18436 case CODE_LABEL:
18437 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
18438 break;
18439
18440 default:
18441 gcc_unreachable ();
18442 }
9878760c 18443
1875cc88 18444 if (TARGET_MINIMAL_TOC)
2bfcf297 18445 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
18446 else
18447 {
5773a50f
DE
18448 fputs ("\t.tc ", file);
18449 RS6000_OUTPUT_BASENAME (file, name);
9878760c 18450
1875cc88 18451 if (offset < 0)
16fdeb48 18452 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 18453 else if (offset)
16fdeb48 18454 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 18455
19d2d16f 18456 fputs ("[TC],", file);
1875cc88 18457 }
581bc4de
MM
18458
18459 /* Currently C++ toc references to vtables can be emitted before it
18460 is decided whether the vtable is public or private. If this is
18461 the case, then the linker will eventually complain that there is
18462 a TOC reference to an unknown section. Thus, for vtables only,
18463 we emit the TOC reference to reference the symbol and not the
18464 section. */
28e510bd 18465 if (VTABLE_NAME_P (name))
581bc4de 18466 {
54ee9799 18467 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 18468 if (offset < 0)
16fdeb48 18469 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 18470 else if (offset > 0)
16fdeb48 18471 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
18472 }
18473 else
18474 output_addr_const (file, x);
19d2d16f 18475 putc ('\n', file);
9878760c
RK
18476}
18477\f
18478/* Output an assembler pseudo-op to write an ASCII string of N characters
18479 starting at P to FILE.
18480
18481 On the RS/6000, we have to do this using the .byte operation and
18482 write out special characters outside the quoted string.
18483 Also, the assembler is broken; very long strings are truncated,
a4f6c312 18484 so we must artificially break them up early. */
9878760c
RK
18485
18486void
a2369ed3 18487output_ascii (FILE *file, const char *p, int n)
9878760c
RK
18488{
18489 char c;
18490 int i, count_string;
d330fd93
KG
18491 const char *for_string = "\t.byte \"";
18492 const char *for_decimal = "\t.byte ";
18493 const char *to_close = NULL;
9878760c
RK
18494
18495 count_string = 0;
18496 for (i = 0; i < n; i++)
18497 {
18498 c = *p++;
18499 if (c >= ' ' && c < 0177)
18500 {
18501 if (for_string)
18502 fputs (for_string, file);
18503 putc (c, file);
18504
18505 /* Write two quotes to get one. */
18506 if (c == '"')
18507 {
18508 putc (c, file);
18509 ++count_string;
18510 }
18511
18512 for_string = NULL;
18513 for_decimal = "\"\n\t.byte ";
18514 to_close = "\"\n";
18515 ++count_string;
18516
18517 if (count_string >= 512)
18518 {
18519 fputs (to_close, file);
18520
18521 for_string = "\t.byte \"";
18522 for_decimal = "\t.byte ";
18523 to_close = NULL;
18524 count_string = 0;
18525 }
18526 }
18527 else
18528 {
18529 if (for_decimal)
18530 fputs (for_decimal, file);
18531 fprintf (file, "%d", c);
18532
18533 for_string = "\n\t.byte \"";
18534 for_decimal = ", ";
18535 to_close = "\n";
18536 count_string = 0;
18537 }
18538 }
18539
18540 /* Now close the string if we have written one. Then end the line. */
18541 if (to_close)
9ebbca7d 18542 fputs (to_close, file);
9878760c
RK
18543}
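
/* Example of the emitted format (illustration only):
   output_ascii (file, "A\nB", 3) produces

	.byte "A"
	.byte 10
	.byte "B"

   i.e. printable runs become quoted strings, everything else becomes
   decimal .byte values, and the 512-character counter above forces an
   early break of very long quoted runs.  */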
18544\f
18545/* Generate a unique section name for FILENAME for a section type
18546 represented by SECTION_DESC. Output goes into BUF.
18547
18548 SECTION_DESC can be any string, as long as it is different for each
18549 possible section type.
18550
18551 We name the section in the same manner as xlc. The name begins with an
18552 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
18553 names) with the last period replaced by the string SECTION_DESC. If
18554 FILENAME does not contain a period, SECTION_DESC is appended to the end of
18555 the name. */
9878760c
RK
18556
18557void
f676971a 18558rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 18559 const char *section_desc)
9878760c 18560{
9ebbca7d 18561 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
18562 char *p;
18563 int len;
9878760c
RK
18564
18565 after_last_slash = filename;
18566 for (q = filename; *q; q++)
11e5fe42
RK
18567 {
18568 if (*q == '/')
18569 after_last_slash = q + 1;
18570 else if (*q == '.')
18571 last_period = q;
18572 }
9878760c 18573
11e5fe42 18574 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 18575 *buf = (char *) xmalloc (len);
9878760c
RK
18576
18577 p = *buf;
18578 *p++ = '_';
18579
18580 for (q = after_last_slash; *q; q++)
18581 {
11e5fe42 18582 if (q == last_period)
c4ad648e 18583 {
9878760c
RK
18584 strcpy (p, section_desc);
18585 p += strlen (section_desc);
e3981aab 18586 break;
c4ad648e 18587 }
9878760c 18588
e9a780ec 18589 else if (ISALNUM (*q))
c4ad648e 18590 *p++ = *q;
9878760c
RK
18591 }
18592
11e5fe42 18593 if (last_period == 0)
9878760c
RK
18594 strcpy (p, section_desc);
18595 else
18596 *p = '\0';
18597}
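
/* Usage sketch with hypothetical arguments: FILENAME "subdir/foo.c" and
   SECTION_DESC ".text_" produce "_foo.text_": the directory prefix is
   dropped, a '_' is prepended, and the final period plus suffix are
   replaced by SECTION_DESC.  */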
e165f3f0 18598\f
a4f6c312 18599/* Emit profile function. */
411707f4 18600
411707f4 18601void
a2369ed3 18602output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 18603{
858081ad
AH
18604 /* Non-standard profiling for kernels, which just saves LR then calls
18605 _mcount without worrying about arg saves. The idea is to change
18606 the function prologue as little as possible as it isn't easy to
18607 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
18608 if (TARGET_PROFILE_KERNEL)
18609 return;
18610
8480e480
CC
18611 if (DEFAULT_ABI == ABI_AIX)
18612 {
9739c90c
JJ
18613#ifndef NO_PROFILE_COUNTERS
18614# define NO_PROFILE_COUNTERS 0
18615#endif
f676971a 18616 if (NO_PROFILE_COUNTERS)
bbbbb16a
ILT
18617 emit_library_call (init_one_libfunc (RS6000_MCOUNT),
18618 LCT_NORMAL, VOIDmode, 0);
9739c90c
JJ
18619 else
18620 {
18621 char buf[30];
18622 const char *label_name;
18623 rtx fun;
411707f4 18624
9739c90c
JJ
18625 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
18626 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
18627 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 18628
bbbbb16a
ILT
18629 emit_library_call (init_one_libfunc (RS6000_MCOUNT),
18630 LCT_NORMAL, VOIDmode, 1, fun, Pmode);
9739c90c 18631 }
8480e480 18632 }
ee890fe2
SS
18633 else if (DEFAULT_ABI == ABI_DARWIN)
18634 {
d5fa86ba 18635 const char *mcount_name = RS6000_MCOUNT;
1de43f85 18636 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
18637
18638 /* Be conservative and always set this, at least for now. */
e3b5732b 18639 crtl->uses_pic_offset_table = 1;
ee890fe2
SS
18640
18641#if TARGET_MACHO
18642 /* For PIC code, set up a stub and collect the caller's address
18643 from r0, which is where the prologue puts it. */
11abc112 18644 if (MACHOPIC_INDIRECT
e3b5732b 18645 && crtl->uses_pic_offset_table)
11abc112 18646 caller_addr_regno = 0;
ee890fe2
SS
18647#endif
18648 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
bbbbb16a 18649 LCT_NORMAL, VOIDmode, 1,
ee890fe2
SS
18650 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
18651 }
411707f4
CC
18652}
18653
a4f6c312 18654/* Write function profiler code. */
e165f3f0
RK
18655
18656void
a2369ed3 18657output_function_profiler (FILE *file, int labelno)
e165f3f0 18658{
3daf36a4 18659 char buf[100];
e165f3f0 18660
38c1f2d7 18661 switch (DEFAULT_ABI)
3daf36a4 18662 {
38c1f2d7 18663 default:
37409796 18664 gcc_unreachable ();
38c1f2d7
MM
18665
18666 case ABI_V4:
09eeeacb
AM
18667 if (!TARGET_32BIT)
18668 {
d4ee4d25 18669 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
18670 return;
18671 }
ffcfcb5f 18672 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 18673 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
18674 if (NO_PROFILE_COUNTERS)
18675 {
18676 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18677 reg_names[0], reg_names[1]);
18678 }
18679 else if (TARGET_SECURE_PLT && flag_pic)
18680 {
18681 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
18682 reg_names[0], reg_names[1]);
18683 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
18684 asm_fprintf (file, "\t{cau|addis} %s,%s,",
18685 reg_names[12], reg_names[12]);
18686 assemble_name (file, buf);
18687 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
18688 assemble_name (file, buf);
18689 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
18690 }
18691 else if (flag_pic == 1)
38c1f2d7 18692 {
dfdfa60f 18693 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
18694 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18695 reg_names[0], reg_names[1]);
17167fd8 18696 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 18697 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 18698 assemble_name (file, buf);
17167fd8 18699 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 18700 }
9ebbca7d 18701 else if (flag_pic > 1)
38c1f2d7 18702 {
71625f3d
AM
18703 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18704 reg_names[0], reg_names[1]);
9ebbca7d 18705 /* Now, we need to get the address of the label. */
71625f3d 18706 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 18707 assemble_name (file, buf);
9ebbca7d
GK
18708 fputs ("-.\n1:", file);
18709 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 18710 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
18711 reg_names[0], reg_names[11]);
18712 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
18713 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 18714 }
38c1f2d7
MM
18715 else
18716 {
17167fd8 18717 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 18718 assemble_name (file, buf);
dfdfa60f 18719 fputs ("@ha\n", file);
71625f3d
AM
18720 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18721 reg_names[0], reg_names[1]);
a260abc9 18722 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 18723 assemble_name (file, buf);
17167fd8 18724 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
18725 }
18726
50d440bc 18727 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
18728 fprintf (file, "\tbl %s%s\n",
18729 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
18730 break;
18731
18732 case ABI_AIX:
ee890fe2 18733 case ABI_DARWIN:
ffcfcb5f
AM
18734 if (!TARGET_PROFILE_KERNEL)
18735 {
a3c9585f 18736 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
18737 }
18738 else
18739 {
37409796 18740 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
18741
18742 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
18743 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
18744
6de9cd9a 18745 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
18746 {
18747 asm_fprintf (file, "\tstd %s,24(%s)\n",
18748 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18749 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18750 asm_fprintf (file, "\tld %s,24(%s)\n",
18751 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18752 }
18753 else
18754 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18755 }
38c1f2d7
MM
18756 break;
18757 }
e165f3f0 18758}
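
/* Illustration only: for the 32-bit SVR4 ABI, non-PIC, with profile
   counters enabled, the code above emits approximately

	mflr 0
	lis 12,LPn@ha
	stw 0,4(1)
	la 0,LPn@l(12)
	bl _mcount

   where LPn is the internal label generated for this call site and
   _mcount is whatever RS6000_MCOUNT names on the target.  */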
a251ffd0 18759
b54cf83a 18760\f
44cd321e
PS
18761
 18762/* The following variable holds the last insn issued. */
18763
18764static rtx last_scheduled_insn;
18765
18766/* The following variable helps to balance issuing of load and
 18767 store instructions. */
18768
18769static int load_store_pendulum;
18770
b54cf83a
DE
18771/* Power4 load update and store update instructions are cracked into a
18772 load or store and an integer insn which are executed in the same cycle.
18773 Branches have their own dispatch slot which does not count against the
18774 GCC issue rate, but it changes the program flow so there are no other
18775 instructions to issue in this cycle. */
18776
18777static int
f676971a
EC
18778rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
18779 int verbose ATTRIBUTE_UNUSED,
a2369ed3 18780 rtx insn, int more)
b54cf83a 18781{
44cd321e 18782 last_scheduled_insn = insn;
b54cf83a
DE
18783 if (GET_CODE (PATTERN (insn)) == USE
18784 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
18785 {
18786 cached_can_issue_more = more;
18787 return cached_can_issue_more;
18788 }
18789
18790 if (insn_terminates_group_p (insn, current_group))
18791 {
18792 cached_can_issue_more = 0;
18793 return cached_can_issue_more;
18794 }
b54cf83a 18795
d296e02e
AP
 18796 /* An insn with no reservation that still reaches here does not consume an issue slot. */
18797 if (recog_memoized (insn) < 0)
18798 return more;
18799
ec507f2d 18800 if (rs6000_sched_groups)
b54cf83a 18801 {
cbe26ab8 18802 if (is_microcoded_insn (insn))
44cd321e 18803 cached_can_issue_more = 0;
cbe26ab8 18804 else if (is_cracked_insn (insn))
44cd321e
PS
18805 cached_can_issue_more = more > 2 ? more - 2 : 0;
18806 else
18807 cached_can_issue_more = more - 1;
18808
18809 return cached_can_issue_more;
b54cf83a 18810 }
165b263e 18811
d296e02e
AP
18812 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
18813 return 0;
18814
44cd321e
PS
18815 cached_can_issue_more = more - 1;
18816 return cached_can_issue_more;
b54cf83a
DE
18817}
18818
a251ffd0
TG
18819/* Adjust the cost of a scheduling dependency. Return the new cost of
18820 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
18821
c237e94a 18822static int
0a4f0294 18823rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 18824{
44cd321e 18825 enum attr_type attr_type;
a251ffd0 18826
44cd321e 18827 if (! recog_memoized (insn))
a251ffd0
TG
18828 return 0;
18829
44cd321e 18830 switch (REG_NOTE_KIND (link))
a251ffd0 18831 {
44cd321e
PS
18832 case REG_DEP_TRUE:
18833 {
18834 /* Data dependency; DEP_INSN writes a register that INSN reads
18835 some cycles later. */
18836
18837 /* Separate a load from a narrower, dependent store. */
18838 if (rs6000_sched_groups
18839 && GET_CODE (PATTERN (insn)) == SET
18840 && GET_CODE (PATTERN (dep_insn)) == SET
18841 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
18842 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
18843 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
18844 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
18845 return cost + 14;
18846
18847 attr_type = get_attr_type (insn);
18848
18849 switch (attr_type)
18850 {
18851 case TYPE_JMPREG:
18852 /* Tell the first scheduling pass about the latency between
18853 a mtctr and bctr (and mtlr and br/blr). The first
18854 scheduling pass will not know about this latency since
18855 the mtctr instruction, which has the latency associated
18856 to it, will be generated by reload. */
18857 return TARGET_POWER ? 5 : 4;
18858 case TYPE_BRANCH:
18859 /* Leave some extra cycles between a compare and its
18860 dependent branch, to inhibit expensive mispredicts. */
18861 if ((rs6000_cpu_attr == CPU_PPC603
18862 || rs6000_cpu_attr == CPU_PPC604
18863 || rs6000_cpu_attr == CPU_PPC604E
18864 || rs6000_cpu_attr == CPU_PPC620
18865 || rs6000_cpu_attr == CPU_PPC630
18866 || rs6000_cpu_attr == CPU_PPC750
18867 || rs6000_cpu_attr == CPU_PPC7400
18868 || rs6000_cpu_attr == CPU_PPC7450
18869 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
18870 || rs6000_cpu_attr == CPU_POWER5
18871 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
18872 && recog_memoized (dep_insn)
18873 && (INSN_CODE (dep_insn) >= 0))
982afe02 18874
44cd321e
PS
18875 switch (get_attr_type (dep_insn))
18876 {
18877 case TYPE_CMP:
18878 case TYPE_COMPARE:
18879 case TYPE_DELAYED_COMPARE:
18880 case TYPE_IMUL_COMPARE:
18881 case TYPE_LMUL_COMPARE:
18882 case TYPE_FPCOMPARE:
18883 case TYPE_CR_LOGICAL:
18884 case TYPE_DELAYED_CR:
18885 return cost + 2;
18886 default:
18887 break;
18888 }
18889 break;
18890
18891 case TYPE_STORE:
18892 case TYPE_STORE_U:
18893 case TYPE_STORE_UX:
18894 case TYPE_FPSTORE:
18895 case TYPE_FPSTORE_U:
18896 case TYPE_FPSTORE_UX:
18897 if ((rs6000_cpu == PROCESSOR_POWER6)
18898 && recog_memoized (dep_insn)
18899 && (INSN_CODE (dep_insn) >= 0))
18900 {
18901
18902 if (GET_CODE (PATTERN (insn)) != SET)
18903 /* If this happens, we have to extend this to schedule
18904 optimally. Return default for now. */
18905 return cost;
18906
18907 /* Adjust the cost for the case where the value written
18908 by a fixed point operation is used as the address
18909 gen value on a store. */
18910 switch (get_attr_type (dep_insn))
18911 {
18912 case TYPE_LOAD:
18913 case TYPE_LOAD_U:
18914 case TYPE_LOAD_UX:
18915 case TYPE_CNTLZ:
18916 {
18917 if (! store_data_bypass_p (dep_insn, insn))
18918 return 4;
18919 break;
18920 }
18921 case TYPE_LOAD_EXT:
18922 case TYPE_LOAD_EXT_U:
18923 case TYPE_LOAD_EXT_UX:
18924 case TYPE_VAR_SHIFT_ROTATE:
18925 case TYPE_VAR_DELAYED_COMPARE:
18926 {
18927 if (! store_data_bypass_p (dep_insn, insn))
18928 return 6;
18929 break;
18930 }
18931 case TYPE_INTEGER:
18932 case TYPE_COMPARE:
18933 case TYPE_FAST_COMPARE:
18934 case TYPE_EXTS:
18935 case TYPE_SHIFT:
18936 case TYPE_INSERT_WORD:
18937 case TYPE_INSERT_DWORD:
18938 case TYPE_FPLOAD_U:
18939 case TYPE_FPLOAD_UX:
18940 case TYPE_STORE_U:
18941 case TYPE_STORE_UX:
18942 case TYPE_FPSTORE_U:
18943 case TYPE_FPSTORE_UX:
18944 {
18945 if (! store_data_bypass_p (dep_insn, insn))
18946 return 3;
18947 break;
18948 }
18949 case TYPE_IMUL:
18950 case TYPE_IMUL2:
18951 case TYPE_IMUL3:
18952 case TYPE_LMUL:
18953 case TYPE_IMUL_COMPARE:
18954 case TYPE_LMUL_COMPARE:
18955 {
18956 if (! store_data_bypass_p (dep_insn, insn))
18957 return 17;
18958 break;
18959 }
18960 case TYPE_IDIV:
18961 {
18962 if (! store_data_bypass_p (dep_insn, insn))
18963 return 45;
18964 break;
18965 }
18966 case TYPE_LDIV:
18967 {
18968 if (! store_data_bypass_p (dep_insn, insn))
18969 return 57;
18970 break;
18971 }
18972 default:
18973 break;
18974 }
18975 }
18976 break;
18977
18978 case TYPE_LOAD:
18979 case TYPE_LOAD_U:
18980 case TYPE_LOAD_UX:
18981 case TYPE_LOAD_EXT:
18982 case TYPE_LOAD_EXT_U:
18983 case TYPE_LOAD_EXT_UX:
18984 if ((rs6000_cpu == PROCESSOR_POWER6)
18985 && recog_memoized (dep_insn)
18986 && (INSN_CODE (dep_insn) >= 0))
18987 {
18988
18989 /* Adjust the cost for the case where the value written
18990 by a fixed point instruction is used within the address
 18991 gen portion of a subsequent load(u)(x). */
18992 switch (get_attr_type (dep_insn))
18993 {
18994 case TYPE_LOAD:
18995 case TYPE_LOAD_U:
18996 case TYPE_LOAD_UX:
18997 case TYPE_CNTLZ:
18998 {
18999 if (set_to_load_agen (dep_insn, insn))
19000 return 4;
19001 break;
19002 }
19003 case TYPE_LOAD_EXT:
19004 case TYPE_LOAD_EXT_U:
19005 case TYPE_LOAD_EXT_UX:
19006 case TYPE_VAR_SHIFT_ROTATE:
19007 case TYPE_VAR_DELAYED_COMPARE:
19008 {
19009 if (set_to_load_agen (dep_insn, insn))
19010 return 6;
19011 break;
19012 }
19013 case TYPE_INTEGER:
19014 case TYPE_COMPARE:
19015 case TYPE_FAST_COMPARE:
19016 case TYPE_EXTS:
19017 case TYPE_SHIFT:
19018 case TYPE_INSERT_WORD:
19019 case TYPE_INSERT_DWORD:
19020 case TYPE_FPLOAD_U:
19021 case TYPE_FPLOAD_UX:
19022 case TYPE_STORE_U:
19023 case TYPE_STORE_UX:
19024 case TYPE_FPSTORE_U:
19025 case TYPE_FPSTORE_UX:
19026 {
19027 if (set_to_load_agen (dep_insn, insn))
19028 return 3;
19029 break;
19030 }
19031 case TYPE_IMUL:
19032 case TYPE_IMUL2:
19033 case TYPE_IMUL3:
19034 case TYPE_LMUL:
19035 case TYPE_IMUL_COMPARE:
19036 case TYPE_LMUL_COMPARE:
19037 {
19038 if (set_to_load_agen (dep_insn, insn))
19039 return 17;
19040 break;
19041 }
19042 case TYPE_IDIV:
19043 {
19044 if (set_to_load_agen (dep_insn, insn))
19045 return 45;
19046 break;
19047 }
19048 case TYPE_LDIV:
19049 {
19050 if (set_to_load_agen (dep_insn, insn))
19051 return 57;
19052 break;
19053 }
19054 default:
19055 break;
19056 }
19057 }
19058 break;
19059
19060 case TYPE_FPLOAD:
19061 if ((rs6000_cpu == PROCESSOR_POWER6)
19062 && recog_memoized (dep_insn)
19063 && (INSN_CODE (dep_insn) >= 0)
19064 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
19065 return 2;
19066
19067 default:
19068 break;
19069 }
c9dbf840 19070
a251ffd0 19071 /* Fall out to return default cost. */
44cd321e
PS
19072 }
19073 break;
19074
19075 case REG_DEP_OUTPUT:
19076 /* Output dependency; DEP_INSN writes a register that INSN writes some
19077 cycles later. */
19078 if ((rs6000_cpu == PROCESSOR_POWER6)
19079 && recog_memoized (dep_insn)
19080 && (INSN_CODE (dep_insn) >= 0))
19081 {
19082 attr_type = get_attr_type (insn);
19083
19084 switch (attr_type)
19085 {
19086 case TYPE_FP:
19087 if (get_attr_type (dep_insn) == TYPE_FP)
19088 return 1;
19089 break;
19090 case TYPE_FPLOAD:
19091 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
19092 return 2;
19093 break;
19094 default:
19095 break;
19096 }
19097 }
19098 case REG_DEP_ANTI:
19099 /* Anti dependency; DEP_INSN reads a register that INSN writes some
19100 cycles later. */
19101 return 0;
19102
19103 default:
19104 gcc_unreachable ();
a251ffd0
TG
19105 }
19106
19107 return cost;
19108}
b6c9286a 19109
 cbe26ab8 19110/* The function returns true if INSN is microcoded.
839a4992 19111 Return false otherwise. */
cbe26ab8
DN
19112
19113static bool
19114is_microcoded_insn (rtx insn)
19115{
19116 if (!insn || !INSN_P (insn)
19117 || GET_CODE (PATTERN (insn)) == USE
19118 || GET_CODE (PATTERN (insn)) == CLOBBER)
19119 return false;
19120
d296e02e
AP
19121 if (rs6000_cpu_attr == CPU_CELL)
19122 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
19123
ec507f2d 19124 if (rs6000_sched_groups)
cbe26ab8
DN
19125 {
19126 enum attr_type type = get_attr_type (insn);
19127 if (type == TYPE_LOAD_EXT_U
19128 || type == TYPE_LOAD_EXT_UX
19129 || type == TYPE_LOAD_UX
19130 || type == TYPE_STORE_UX
19131 || type == TYPE_MFCR)
c4ad648e 19132 return true;
cbe26ab8
DN
19133 }
19134
19135 return false;
19136}
19137
cbe26ab8
DN
19138/* The function returns true if INSN is cracked into 2 instructions
19139 by the processor (and therefore occupies 2 issue slots). */
19140
19141static bool
19142is_cracked_insn (rtx insn)
19143{
19144 if (!insn || !INSN_P (insn)
19145 || GET_CODE (PATTERN (insn)) == USE
19146 || GET_CODE (PATTERN (insn)) == CLOBBER)
19147 return false;
19148
ec507f2d 19149 if (rs6000_sched_groups)
cbe26ab8
DN
19150 {
19151 enum attr_type type = get_attr_type (insn);
19152 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
19153 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
19154 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
19155 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
19156 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
19157 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
19158 || type == TYPE_IDIV || type == TYPE_LDIV
19159 || type == TYPE_INSERT_WORD)
19160 return true;
cbe26ab8
DN
19161 }
19162
19163 return false;
19164}
19165
19166/* The function returns true if INSN can be issued only from
a3c9585f 19167 the branch slot. */
cbe26ab8
DN
19168
19169static bool
19170is_branch_slot_insn (rtx insn)
19171{
19172 if (!insn || !INSN_P (insn)
19173 || GET_CODE (PATTERN (insn)) == USE
19174 || GET_CODE (PATTERN (insn)) == CLOBBER)
19175 return false;
19176
ec507f2d 19177 if (rs6000_sched_groups)
cbe26ab8
DN
19178 {
19179 enum attr_type type = get_attr_type (insn);
19180 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 19181 return true;
cbe26ab8
DN
19182 return false;
19183 }
19184
19185 return false;
19186}
79ae11c4 19187
44cd321e
PS
 19188/* The function returns true if OUT_INSN sets a value that is
 19189 used in the address generation computation of IN_INSN. */
19190static bool
19191set_to_load_agen (rtx out_insn, rtx in_insn)
19192{
19193 rtx out_set, in_set;
19194
19195 /* For performance reasons, only handle the simple case where
19196 both loads are a single_set. */
19197 out_set = single_set (out_insn);
19198 if (out_set)
19199 {
19200 in_set = single_set (in_insn);
19201 if (in_set)
19202 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
19203 }
19204
19205 return false;
19206}
19207
 19208/* Return true if the target storage location of INSN1 is adjacent
 19209 to the target storage location of INSN2, i.e. the two stores write
 19210 neighboring memory locations. */
19211
19212static bool
19213adjacent_mem_locations (rtx insn1, rtx insn2)
19214{
19215
e3a0e200
PB
19216 rtx a = get_store_dest (PATTERN (insn1));
19217 rtx b = get_store_dest (PATTERN (insn2));
19218
44cd321e
PS
19219 if ((GET_CODE (XEXP (a, 0)) == REG
19220 || (GET_CODE (XEXP (a, 0)) == PLUS
19221 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
19222 && (GET_CODE (XEXP (b, 0)) == REG
19223 || (GET_CODE (XEXP (b, 0)) == PLUS
19224 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
19225 {
f98e8938 19226 HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
44cd321e 19227 rtx reg0, reg1;
44cd321e
PS
19228
19229 if (GET_CODE (XEXP (a, 0)) == PLUS)
19230 {
19231 reg0 = XEXP (XEXP (a, 0), 0);
19232 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
19233 }
19234 else
19235 reg0 = XEXP (a, 0);
19236
19237 if (GET_CODE (XEXP (b, 0)) == PLUS)
19238 {
19239 reg1 = XEXP (XEXP (b, 0), 0);
19240 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
19241 }
19242 else
19243 reg1 = XEXP (b, 0);
19244
19245 val_diff = val1 - val0;
19246
19247 return ((REGNO (reg0) == REGNO (reg1))
f98e8938
JJ
19248 && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
19249 || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
44cd321e
PS
19250 }
19251
19252 return false;
19253}
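
/* For example (hypothetical insns): two stores whose destinations are
   0(r9) and 4(r9), each with a recorded MEM_SIZE of 4, are adjacent here
   because val_diff equals the size of the lower store; stores that use
   different base registers are never considered adjacent.  */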
19254
a4f6c312 19255/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
19256 priority INSN_PRIORITY (INSN). Increase the priority to execute the
19257 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
19258 define this macro if you do not need to adjust the scheduling
19259 priorities of insns. */
bef84347 19260
c237e94a 19261static int
a2369ed3 19262rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 19263{
a4f6c312
SS
19264 /* On machines (like the 750) which have asymmetric integer units,
19265 where one integer unit can do multiply and divides and the other
19266 can't, reduce the priority of multiply/divide so it is scheduled
19267 before other integer operations. */
bef84347
VM
19268
19269#if 0
2c3c49de 19270 if (! INSN_P (insn))
bef84347
VM
19271 return priority;
19272
19273 if (GET_CODE (PATTERN (insn)) == USE)
19274 return priority;
19275
19276 switch (rs6000_cpu_attr) {
19277 case CPU_PPC750:
19278 switch (get_attr_type (insn))
19279 {
19280 default:
19281 break;
19282
19283 case TYPE_IMUL:
19284 case TYPE_IDIV:
3cb999d8
DE
19285 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
19286 priority, priority);
bef84347
VM
19287 if (priority >= 0 && priority < 0x01000000)
19288 priority >>= 3;
19289 break;
19290 }
19291 }
19292#endif
19293
44cd321e 19294 if (insn_must_be_first_in_group (insn)
79ae11c4 19295 && reload_completed
f676971a 19296 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
19297 && rs6000_sched_restricted_insns_priority)
19298 {
19299
c4ad648e
AM
19300 /* Prioritize insns that can be dispatched only in the first
19301 dispatch slot. */
79ae11c4 19302 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
19303 /* Attach highest priority to insn. This means that in
19304 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 19305 precede 'priority' (critical path) considerations. */
f676971a 19306 return current_sched_info->sched_max_insns_priority;
79ae11c4 19307 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 19308 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
19309 haifa-sched.c:ready_sort(), only 'priority' (critical path)
19310 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
19311 return (priority + 1);
19312 }
79ae11c4 19313
44cd321e
PS
19314 if (rs6000_cpu == PROCESSOR_POWER6
19315 && ((load_store_pendulum == -2 && is_load_insn (insn))
19316 || (load_store_pendulum == 2 && is_store_insn (insn))))
19317 /* Attach highest priority to insn if the scheduler has just issued two
19318 stores and this instruction is a load, or two loads and this instruction
19319 is a store. Power6 wants loads and stores scheduled alternately
19320 when possible */
19321 return current_sched_info->sched_max_insns_priority;
19322
bef84347
VM
19323 return priority;
19324}
19325
d296e02e
AP
19326/* Return true if the instruction is nonpipelined on the Cell. */
19327static bool
19328is_nonpipeline_insn (rtx insn)
19329{
19330 enum attr_type type;
19331 if (!insn || !INSN_P (insn)
19332 || GET_CODE (PATTERN (insn)) == USE
19333 || GET_CODE (PATTERN (insn)) == CLOBBER)
19334 return false;
19335
19336 type = get_attr_type (insn);
19337 if (type == TYPE_IMUL
19338 || type == TYPE_IMUL2
19339 || type == TYPE_IMUL3
19340 || type == TYPE_LMUL
19341 || type == TYPE_IDIV
19342 || type == TYPE_LDIV
19343 || type == TYPE_SDIV
19344 || type == TYPE_DDIV
19345 || type == TYPE_SSQRT
19346 || type == TYPE_DSQRT
19347 || type == TYPE_MFCR
19348 || type == TYPE_MFCRF
19349 || type == TYPE_MFJMPR)
19350 {
19351 return true;
19352 }
19353 return false;
19354}
19355
19356
a4f6c312
SS
19357/* Return how many instructions the machine can issue per cycle. */
19358
c237e94a 19359static int
863d938c 19360rs6000_issue_rate (void)
b6c9286a 19361{
3317bab1
DE
19362 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
19363 if (!reload_completed)
19364 return 1;
19365
b6c9286a 19366 switch (rs6000_cpu_attr) {
3cb999d8
DE
19367 case CPU_RIOS1: /* ? */
19368 case CPU_RS64A:
19369 case CPU_PPC601: /* ? */
ed947a96 19370 case CPU_PPC7450:
3cb999d8 19371 return 3;
b54cf83a 19372 case CPU_PPC440:
b6c9286a 19373 case CPU_PPC603:
bef84347 19374 case CPU_PPC750:
ed947a96 19375 case CPU_PPC7400:
be12c2b0 19376 case CPU_PPC8540:
d296e02e 19377 case CPU_CELL:
fa41c305
EW
19378 case CPU_PPCE300C2:
19379 case CPU_PPCE300C3:
edae5fe3 19380 case CPU_PPCE500MC:
f676971a 19381 return 2;
3cb999d8 19382 case CPU_RIOS2:
b6c9286a 19383 case CPU_PPC604:
19684119 19384 case CPU_PPC604E:
b6c9286a 19385 case CPU_PPC620:
3cb999d8 19386 case CPU_PPC630:
b6c9286a 19387 return 4;
cbe26ab8 19388 case CPU_POWER4:
ec507f2d 19389 case CPU_POWER5:
44cd321e 19390 case CPU_POWER6:
cbe26ab8 19391 return 5;
b6c9286a
MM
19392 default:
19393 return 1;
19394 }
19395}
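
/* For instance (values from the switch above): at -mcpu=power5 the second
   scheduling pass works with an issue rate of 5 per cycle, while the
   first, pre-reload pass deliberately assumes 1 to limit degradation.  */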
19396
be12c2b0
VM
19397/* Return how many instructions to look ahead for better insn
19398 scheduling. */
19399
19400static int
863d938c 19401rs6000_use_sched_lookahead (void)
be12c2b0
VM
19402{
19403 if (rs6000_cpu_attr == CPU_PPC8540)
19404 return 4;
d296e02e
AP
19405 if (rs6000_cpu_attr == CPU_CELL)
19406 return (reload_completed ? 8 : 0);
be12c2b0
VM
19407 return 0;
19408}
19409
d296e02e
AP
 19410/* We are choosing an insn from the ready queue. Return nonzero if INSN can be chosen. */
19411static int
19412rs6000_use_sched_lookahead_guard (rtx insn)
19413{
19414 if (rs6000_cpu_attr != CPU_CELL)
19415 return 1;
19416
19417 if (insn == NULL_RTX || !INSN_P (insn))
19418 abort ();
982afe02 19419
d296e02e
AP
19420 if (!reload_completed
19421 || is_nonpipeline_insn (insn)
19422 || is_microcoded_insn (insn))
19423 return 0;
19424
19425 return 1;
19426}
19427
569fa502
DN
 19428/* Determine if PAT refers to memory. */
19429
19430static bool
19431is_mem_ref (rtx pat)
19432{
19433 const char * fmt;
19434 int i, j;
19435 bool ret = false;
19436
1de59bbd
DE
19437 /* stack_tie does not produce any real memory traffic. */
19438 if (GET_CODE (pat) == UNSPEC
19439 && XINT (pat, 1) == UNSPEC_TIE)
19440 return false;
19441
569fa502
DN
19442 if (GET_CODE (pat) == MEM)
19443 return true;
19444
19445 /* Recursively process the pattern. */
19446 fmt = GET_RTX_FORMAT (GET_CODE (pat));
19447
19448 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
19449 {
19450 if (fmt[i] == 'e')
19451 ret |= is_mem_ref (XEXP (pat, i));
19452 else if (fmt[i] == 'E')
19453 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
19454 ret |= is_mem_ref (XVECEXP (pat, i, j));
19455 }
19456
19457 return ret;
19458}
19459
19460/* Determine if PAT is a PATTERN of a load insn. */
f676971a 19461
569fa502
DN
19462static bool
19463is_load_insn1 (rtx pat)
19464{
19465 if (!pat || pat == NULL_RTX)
19466 return false;
19467
19468 if (GET_CODE (pat) == SET)
19469 return is_mem_ref (SET_SRC (pat));
19470
19471 if (GET_CODE (pat) == PARALLEL)
19472 {
19473 int i;
19474
19475 for (i = 0; i < XVECLEN (pat, 0); i++)
19476 if (is_load_insn1 (XVECEXP (pat, 0, i)))
19477 return true;
19478 }
19479
19480 return false;
19481}
19482
19483/* Determine if INSN loads from memory. */
19484
19485static bool
19486is_load_insn (rtx insn)
19487{
19488 if (!insn || !INSN_P (insn))
19489 return false;
19490
19491 if (GET_CODE (insn) == CALL_INSN)
19492 return false;
19493
19494 return is_load_insn1 (PATTERN (insn));
19495}
19496
19497/* Determine if PAT is a PATTERN of a store insn. */
19498
19499static bool
19500is_store_insn1 (rtx pat)
19501{
19502 if (!pat || pat == NULL_RTX)
19503 return false;
19504
19505 if (GET_CODE (pat) == SET)
19506 return is_mem_ref (SET_DEST (pat));
19507
19508 if (GET_CODE (pat) == PARALLEL)
19509 {
19510 int i;
19511
19512 for (i = 0; i < XVECLEN (pat, 0); i++)
19513 if (is_store_insn1 (XVECEXP (pat, 0, i)))
19514 return true;
19515 }
19516
19517 return false;
19518}
19519
19520/* Determine if INSN stores to memory. */
19521
19522static bool
19523is_store_insn (rtx insn)
19524{
19525 if (!insn || !INSN_P (insn))
19526 return false;
19527
19528 return is_store_insn1 (PATTERN (insn));
19529}
19530
e3a0e200
PB
19531/* Return the dest of a store insn. */
19532
19533static rtx
19534get_store_dest (rtx pat)
19535{
19536 gcc_assert (is_store_insn1 (pat));
19537
19538 if (GET_CODE (pat) == SET)
19539 return SET_DEST (pat);
19540 else if (GET_CODE (pat) == PARALLEL)
19541 {
19542 int i;
19543
19544 for (i = 0; i < XVECLEN (pat, 0); i++)
19545 {
19546 rtx inner_pat = XVECEXP (pat, 0, i);
19547 if (GET_CODE (inner_pat) == SET
19548 && is_mem_ref (SET_DEST (inner_pat)))
19549 return inner_pat;
19550 }
19551 }
19552 /* We shouldn't get here, because we should have either a simple
19553 store insn or a store with update which are covered above. */
19554 gcc_unreachable();
19555}
19556
569fa502
DN
19557/* Returns whether the dependence between INSN and NEXT is considered
19558 costly by the given target. */
19559
19560static bool
b198261f 19561rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 19562{
b198261f
MK
19563 rtx insn;
19564 rtx next;
19565
aabcd309 19566 /* If the flag is not enabled - no dependence is considered costly;
f676971a 19567 allow all dependent insns in the same group.
569fa502
DN
19568 This is the most aggressive option. */
19569 if (rs6000_sched_costly_dep == no_dep_costly)
19570 return false;
19571
f676971a 19572 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
19573 do not allow dependent instructions in the same group.
19574 This is the most conservative option. */
19575 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 19576 return true;
569fa502 19577
b198261f
MK
19578 insn = DEP_PRO (dep);
19579 next = DEP_CON (dep);
19580
f676971a
EC
19581 if (rs6000_sched_costly_dep == store_to_load_dep_costly
19582 && is_load_insn (next)
569fa502
DN
19583 && is_store_insn (insn))
19584 /* Prevent load after store in the same group. */
19585 return true;
19586
19587 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 19588 && is_load_insn (next)
569fa502 19589 && is_store_insn (insn)
e2f6ff94 19590 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
19591 /* Prevent load after store in the same group if it is a true
19592 dependence. */
569fa502 19593 return true;
f676971a
EC
19594
19595 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
19596 and will not be scheduled in the same group. */
19597 if (rs6000_sched_costly_dep <= max_dep_latency
19598 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
19599 return true;
19600
19601 return false;
19602}
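
/* Worked example with assumed numbers: if rs6000_sched_costly_dep is 3
   (e.g. -msched-costly-dep=3), a dependence with latency COST 4 and
   DISTANCE 0 gives 4 - 0 >= 3, so the two insns are kept out of the same
   dispatch group; with DISTANCE 2 the slack drops to 2 and the dependence
   is no longer treated as costly.  */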
19603
f676971a 19604/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
19605 skipping any "non-active" insns - insns that will not actually occupy
19606 an issue slot. Return NULL_RTX if such an insn is not found. */
19607
19608static rtx
19609get_next_active_insn (rtx insn, rtx tail)
19610{
f489aff8 19611 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
19612 return NULL_RTX;
19613
f489aff8 19614 while (1)
cbe26ab8 19615 {
f489aff8
AM
19616 insn = NEXT_INSN (insn);
19617 if (insn == NULL_RTX || insn == tail)
19618 return NULL_RTX;
cbe26ab8 19619
f489aff8
AM
19620 if (CALL_P (insn)
19621 || JUMP_P (insn)
19622 || (NONJUMP_INSN_P (insn)
19623 && GET_CODE (PATTERN (insn)) != USE
19624 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 19625 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
19626 break;
19627 }
19628 return insn;
cbe26ab8
DN
19629}
19630
44cd321e
PS
19631/* We are about to begin issuing insns for this clock cycle. */
19632
19633static int
19634rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
19635 rtx *ready ATTRIBUTE_UNUSED,
19636 int *pn_ready ATTRIBUTE_UNUSED,
19637 int clock_var ATTRIBUTE_UNUSED)
19638{
d296e02e
AP
19639 int n_ready = *pn_ready;
19640
44cd321e
PS
19641 if (sched_verbose)
19642 fprintf (dump, "// rs6000_sched_reorder :\n");
19643
d296e02e
AP
 19644 /* Reorder the ready list if the next insn to issue
 19645 is a nonpipelined insn. */
19646 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
19647 {
19648 if (is_nonpipeline_insn (ready[n_ready - 1])
19649 && (recog_memoized (ready[n_ready - 2]) > 0))
19650 /* Simply swap first two insns. */
19651 {
19652 rtx tmp = ready[n_ready - 1];
19653 ready[n_ready - 1] = ready[n_ready - 2];
19654 ready[n_ready - 2] = tmp;
19655 }
19656 }
19657
44cd321e
PS
19658 if (rs6000_cpu == PROCESSOR_POWER6)
19659 load_store_pendulum = 0;
19660
19661 return rs6000_issue_rate ();
19662}
19663
19664/* Like rs6000_sched_reorder, but called after issuing each insn. */
19665
19666static int
19667rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
19668 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
19669{
19670 if (sched_verbose)
19671 fprintf (dump, "// rs6000_sched_reorder2 :\n");
19672
 19673 /* For Power6, we need to handle some special cases to try to keep the
19674 store queue from overflowing and triggering expensive flushes.
19675
19676 This code monitors how load and store instructions are being issued
19677 and skews the ready list one way or the other to increase the likelihood
19678 that a desired instruction is issued at the proper time.
19679
19680 A couple of things are done. First, we maintain a "load_store_pendulum"
19681 to track the current state of load/store issue.
19682
19683 - If the pendulum is at zero, then no loads or stores have been
19684 issued in the current cycle so we do nothing.
19685
19686 - If the pendulum is 1, then a single load has been issued in this
19687 cycle and we attempt to locate another load in the ready list to
19688 issue with it.
19689
2f8e468b 19690 - If the pendulum is -2, then two stores have already been
44cd321e
PS
19691 issued in this cycle, so we increase the priority of the first load
19692 in the ready list to increase its likelihood of being chosen first
19693 in the next cycle.
19694
19695 - If the pendulum is -1, then a single store has been issued in this
19696 cycle and we attempt to locate another store in the ready list to
19697 issue with it, preferring a store to an adjacent memory location to
19698 facilitate store pairing in the store queue.
19699
19700 - If the pendulum is 2, then two loads have already been
19701 issued in this cycle, so we increase the priority of the first store
19702 in the ready list to increase its likelihood of being chosen first
19703 in the next cycle.
19704
19705 - If the pendulum < -2 or > 2, then do nothing.
19706
19707 Note: This code covers the most common scenarios. There exist
19708 non-load/store instructions which make use of the LSU and which
19709 would need to be accounted for to strictly model the behavior
19710 of the machine. Those instructions are currently unaccounted
19711 for to help minimize compile time overhead of this code.
19712 */
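 /* A hypothetical single-cycle trace on Power6, for illustration only
    (issue order: store, store, load):
      store issued  ->  pendulum -1  (scan for a second, ideally adjacent, store)
      store issued  ->  pendulum -2  (boost the first load on the ready list)
      load boosted  ->  pendulum -3  (outside [-2, 2], so no further action)  */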
19713 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
19714 {
19715 int pos;
19716 int i;
19717 rtx tmp;
19718
19719 if (is_store_insn (last_scheduled_insn))
19720 /* Issuing a store, swing the load_store_pendulum to the left */
19721 load_store_pendulum--;
19722 else if (is_load_insn (last_scheduled_insn))
19723 /* Issuing a load, swing the load_store_pendulum to the right */
19724 load_store_pendulum++;
19725 else
19726 return cached_can_issue_more;
19727
19728 /* If the pendulum is balanced, or there is only one instruction on
19729 the ready list, then all is well, so return. */
19730 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
19731 return cached_can_issue_more;
19732
19733 if (load_store_pendulum == 1)
19734 {
19735 /* A load has been issued in this cycle. Scan the ready list
19736 for another load to issue with it */
19737 pos = *pn_ready-1;
19738
19739 while (pos >= 0)
19740 {
19741 if (is_load_insn (ready[pos]))
19742 {
19743 /* Found a load. Move it to the head of the ready list,
19744 and adjust its priority so that it is more likely to
19745 stay there. */
19746 tmp = ready[pos];
19747 for (i=pos; i<*pn_ready-1; i++)
19748 ready[i] = ready[i + 1];
19749 ready[*pn_ready-1] = tmp;
e855c69d
AB
19750
19751 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e
PS
19752 INSN_PRIORITY (tmp)++;
19753 break;
19754 }
19755 pos--;
19756 }
19757 }
19758 else if (load_store_pendulum == -2)
19759 {
19760 /* Two stores have been issued in this cycle. Increase the
19761 priority of the first load in the ready list to favor it for
19762 issuing in the next cycle. */
19763 pos = *pn_ready-1;
19764
19765 while (pos >= 0)
19766 {
19767 if (is_load_insn (ready[pos])
e855c69d
AB
19768 && !sel_sched_p ()
19769 && INSN_PRIORITY_KNOWN (ready[pos]))
44cd321e
PS
19770 {
19771 INSN_PRIORITY (ready[pos])++;
19772
19773 /* Adjust the pendulum to account for the fact that a load
19774 was found and increased in priority. This is to prevent
19775 increasing the priority of multiple loads */
19776 load_store_pendulum--;
19777
19778 break;
19779 }
19780 pos--;
19781 }
19782 }
19783 else if (load_store_pendulum == -1)
19784 {
19785 /* A store has been issued in this cycle. Scan the ready list for
19786 another store to issue with it, preferring a store to an adjacent
19787 memory location */
19788 int first_store_pos = -1;
19789
19790 pos = *pn_ready-1;
19791
19792 while (pos >= 0)
19793 {
19794 if (is_store_insn (ready[pos]))
19795 {
19796 /* Maintain the index of the first store found on the
19797 list */
19798 if (first_store_pos == -1)
19799 first_store_pos = pos;
19800
19801 if (is_store_insn (last_scheduled_insn)
19802 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
19803 {
19804 /* Found an adjacent store. Move it to the head of the
19805 ready list, and adjust its priority so that it is
19806 more likely to stay there. */
19807 tmp = ready[pos];
19808 for (i=pos; i<*pn_ready-1; i++)
19809 ready[i] = ready[i + 1];
19810 ready[*pn_ready-1] = tmp;
e855c69d
AB
19811
19812 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e 19813 INSN_PRIORITY (tmp)++;
e855c69d 19814
44cd321e
PS
19815 first_store_pos = -1;
19816
19817 break;
19818 }
19819 }
19820 pos--;
19821 }
19822
19823 if (first_store_pos >= 0)
19824 {
19825 /* An adjacent store wasn't found, but a non-adjacent store was,
19826 so move the non-adjacent store to the front of the ready
19827 list, and adjust its priority so that it is more likely to
19828 stay there. */
19829 tmp = ready[first_store_pos];
19830 for (i=first_store_pos; i<*pn_ready-1; i++)
19831 ready[i] = ready[i + 1];
19832 ready[*pn_ready-1] = tmp;
e855c69d 19833 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e
PS
19834 INSN_PRIORITY (tmp)++;
19835 }
19836 }
19837 else if (load_store_pendulum == 2)
19838 {
19839 /* Two loads have been issued in this cycle. Increase the priority
19840 of the first store in the ready list to favor it for issuing in
19841 the next cycle. */
19842 pos = *pn_ready-1;
19843
19844 while (pos >= 0)
19845 {
19846 if (is_store_insn (ready[pos])
e855c69d
AB
19847 && !sel_sched_p ()
19848 && INSN_PRIORITY_KNOWN (ready[pos]))
44cd321e
PS
19849 {
19850 INSN_PRIORITY (ready[pos])++;
19851
19852 /* Adjust the pendulum to account for the fact that a store
19853 was found and increased in priority. This is to prevent
19854 increasing the priority of multiple stores */
19855 load_store_pendulum++;
19856
19857 break;
19858 }
19859 pos--;
19860 }
19861 }
19862 }
19863
19864 return cached_can_issue_more;
19865}
19866
839a4992 19867/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
19868 of group WHICH_GROUP.
19869
19870 If WHICH_GROUP == current_group, this function will return true if INSN
19871 causes the termination of the current group (i.e., the dispatch group to
19872 which INSN belongs). This means that INSN will be the last insn in the
19873 group it belongs to.
19874
19875 If WHICH_GROUP == previous_group, this function will return true if INSN
19876 causes the termination of the previous group (i.e., the dispatch group that
19877 precedes the group to which INSN belongs). This means that INSN will be
19878 the first insn in the group it belongs to. */
19879
19880static bool
19881insn_terminates_group_p (rtx insn, enum group_termination which_group)
19882{
44cd321e 19883 bool first, last;
cbe26ab8
DN
19884
19885 if (! insn)
19886 return false;
569fa502 19887
44cd321e
PS
19888 first = insn_must_be_first_in_group (insn);
19889 last = insn_must_be_last_in_group (insn);
cbe26ab8 19890
44cd321e 19891 if (first && last)
cbe26ab8
DN
19892 return true;
19893
19894 if (which_group == current_group)
44cd321e 19895 return last;
cbe26ab8 19896 else if (which_group == previous_group)
44cd321e
PS
19897 return first;
19898
19899 return false;
19900}
19901
19902
19903static bool
19904insn_must_be_first_in_group (rtx insn)
19905{
19906 enum attr_type type;
19907
19908 if (!insn
19909 || insn == NULL_RTX
19910 || GET_CODE (insn) == NOTE
19911 || GET_CODE (PATTERN (insn)) == USE
19912 || GET_CODE (PATTERN (insn)) == CLOBBER)
19913 return false;
19914
19915 switch (rs6000_cpu)
cbe26ab8 19916 {
44cd321e
PS
19917 case PROCESSOR_POWER5:
19918 if (is_cracked_insn (insn))
19919 return true;
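 /* Fall through: POWER5 is also subject to the POWER4 checks below.  */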
19920 case PROCESSOR_POWER4:
19921 if (is_microcoded_insn (insn))
19922 return true;
19923
19924 if (!rs6000_sched_groups)
19925 return false;
19926
19927 type = get_attr_type (insn);
19928
19929 switch (type)
19930 {
19931 case TYPE_MFCR:
19932 case TYPE_MFCRF:
19933 case TYPE_MTCR:
19934 case TYPE_DELAYED_CR:
19935 case TYPE_CR_LOGICAL:
19936 case TYPE_MTJMPR:
19937 case TYPE_MFJMPR:
19938 case TYPE_IDIV:
19939 case TYPE_LDIV:
19940 case TYPE_LOAD_L:
19941 case TYPE_STORE_C:
19942 case TYPE_ISYNC:
19943 case TYPE_SYNC:
19944 return true;
19945 default:
19946 break;
19947 }
19948 break;
19949 case PROCESSOR_POWER6:
19950 type = get_attr_type (insn);
19951
19952 switch (type)
19953 {
19954 case TYPE_INSERT_DWORD:
19955 case TYPE_EXTS:
19956 case TYPE_CNTLZ:
19957 case TYPE_SHIFT:
19958 case TYPE_VAR_SHIFT_ROTATE:
19959 case TYPE_TRAP:
19960 case TYPE_IMUL:
19961 case TYPE_IMUL2:
19962 case TYPE_IMUL3:
19963 case TYPE_LMUL:
19964 case TYPE_IDIV:
19965 case TYPE_INSERT_WORD:
19966 case TYPE_DELAYED_COMPARE:
19967 case TYPE_IMUL_COMPARE:
19968 case TYPE_LMUL_COMPARE:
19969 case TYPE_FPCOMPARE:
19970 case TYPE_MFCR:
19971 case TYPE_MTCR:
19972 case TYPE_MFJMPR:
19973 case TYPE_MTJMPR:
19974 case TYPE_ISYNC:
19975 case TYPE_SYNC:
19976 case TYPE_LOAD_L:
19977 case TYPE_STORE_C:
19978 case TYPE_LOAD_U:
19979 case TYPE_LOAD_UX:
19980 case TYPE_LOAD_EXT_UX:
19981 case TYPE_STORE_U:
19982 case TYPE_STORE_UX:
19983 case TYPE_FPLOAD_U:
19984 case TYPE_FPLOAD_UX:
19985 case TYPE_FPSTORE_U:
19986 case TYPE_FPSTORE_UX:
19987 return true;
19988 default:
19989 break;
19990 }
19991 break;
19992 default:
19993 break;
19994 }
19995
19996 return false;
19997}
19998
19999static bool
20000insn_must_be_last_in_group (rtx insn)
20001{
20002 enum attr_type type;
20003
20004 if (!insn
20005 || insn == NULL_RTX
20006 || GET_CODE (insn) == NOTE
20007 || GET_CODE (PATTERN (insn)) == USE
20008 || GET_CODE (PATTERN (insn)) == CLOBBER)
20009 return false;
20010
20011 switch (rs6000_cpu) {
20012 case PROCESSOR_POWER4:
20013 case PROCESSOR_POWER5:
20014 if (is_microcoded_insn (insn))
20015 return true;
20016
20017 if (is_branch_slot_insn (insn))
20018 return true;
20019
20020 break;
20021 case PROCESSOR_POWER6:
20022 type = get_attr_type (insn);
20023
20024 switch (type)
20025 {
20026 case TYPE_EXTS:
20027 case TYPE_CNTLZ:
20028 case TYPE_SHIFT:
20029 case TYPE_VAR_SHIFT_ROTATE:
20030 case TYPE_TRAP:
20031 case TYPE_IMUL:
20032 case TYPE_IMUL2:
20033 case TYPE_IMUL3:
20034 case TYPE_LMUL:
20035 case TYPE_IDIV:
20036 case TYPE_DELAYED_COMPARE:
20037 case TYPE_IMUL_COMPARE:
20038 case TYPE_LMUL_COMPARE:
20039 case TYPE_FPCOMPARE:
20040 case TYPE_MFCR:
20041 case TYPE_MTCR:
20042 case TYPE_MFJMPR:
20043 case TYPE_MTJMPR:
20044 case TYPE_ISYNC:
20045 case TYPE_SYNC:
20046 case TYPE_LOAD_L:
20047 case TYPE_STORE_C:
20048 return true;
20049 default:
20050 break;
cbe26ab8 20051 }
44cd321e
PS
20052 break;
20053 default:
20054 break;
20055 }
cbe26ab8
DN
20056
20057 return false;
20058}
20059
839a4992 20060/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
20061 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
20062
20063static bool
20064is_costly_group (rtx *group_insns, rtx next_insn)
20065{
20066 int i;
cbe26ab8
DN
20067 int issue_rate = rs6000_issue_rate ();
20068
20069 for (i = 0; i < issue_rate; i++)
20070 {
e2f6ff94
MK
20071 sd_iterator_def sd_it;
20072 dep_t dep;
cbe26ab8 20073 rtx insn = group_insns[i];
b198261f 20074
cbe26ab8 20075 if (!insn)
c4ad648e 20076 continue;
b198261f 20077
e2f6ff94 20078 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 20079 {
b198261f
MK
20080 rtx next = DEP_CON (dep);
20081
20082 if (next == next_insn
20083 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
20084 return true;
c4ad648e 20085 }
cbe26ab8
DN
20086 }
20087
20088 return false;
20089}
20090
f676971a 20091/* Utility of the function redefine_groups.
cbe26ab8
DN
20092 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
20093 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
20094 to keep it "far" (in a separate group) from GROUP_INSNS, following
20095 one of the following schemes, depending on the value of the flag
20096 -minsert-sched-nops = X:
20097 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 20098 in order to force NEXT_INSN into a separate group.
f676971a
EC
20099 (2) X < sched_finish_regroup_exact: insert exactly X nops.
20100 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
20101 insertion (has a group just ended, how many vacant issue slots remain in the
20102 last group, and how many dispatch groups were encountered so far). */
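 /* A hypothetical example (issue_rate == 4, for illustration only): if two
    insns of the current group already sit in GROUP_INSNS and NEXT_INSN has
    a costly dependence on one of them, scheme (1) emits one nop when
    NEXT_INSN is not a branch (the remaining branch slot cannot hold it
    anyway) and two nops when it is a branch, whereas scheme (2) with X == 2
    always emits exactly two nops.  */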
20103
f676971a 20104static int
c4ad648e
AM
20105force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
20106 rtx next_insn, bool *group_end, int can_issue_more,
20107 int *group_count)
cbe26ab8
DN
20108{
20109 rtx nop;
20110 bool force;
20111 int issue_rate = rs6000_issue_rate ();
20112 bool end = *group_end;
20113 int i;
20114
20115 if (next_insn == NULL_RTX)
20116 return can_issue_more;
20117
20118 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
20119 return can_issue_more;
20120
20121 force = is_costly_group (group_insns, next_insn);
20122 if (!force)
20123 return can_issue_more;
20124
20125 if (sched_verbose > 6)
20126 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 20127 *group_count ,can_issue_more);
cbe26ab8
DN
20128
20129 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
20130 {
20131 if (*group_end)
c4ad648e 20132 can_issue_more = 0;
cbe26ab8
DN
20133
20134 /* Since only a branch can be issued in the last issue_slot, it is
20135 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
20136 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
20137 in this case the last nop will start a new group and the branch
20138 will be forced to the new group. */
cbe26ab8 20139 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 20140 can_issue_more--;
cbe26ab8
DN
20141
20142 while (can_issue_more > 0)
c4ad648e 20143 {
9390387d 20144 nop = gen_nop ();
c4ad648e
AM
20145 emit_insn_before (nop, next_insn);
20146 can_issue_more--;
20147 }
cbe26ab8
DN
20148
20149 *group_end = true;
20150 return 0;
f676971a 20151 }
cbe26ab8
DN
20152
20153 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
20154 {
20155 int n_nops = rs6000_sched_insert_nops;
20156
f676971a 20157 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 20158 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 20159 if (can_issue_more == 0)
c4ad648e 20160 can_issue_more = issue_rate;
cbe26ab8
DN
20161 can_issue_more--;
20162 if (can_issue_more == 0)
c4ad648e
AM
20163 {
20164 can_issue_more = issue_rate - 1;
20165 (*group_count)++;
20166 end = true;
20167 for (i = 0; i < issue_rate; i++)
20168 {
20169 group_insns[i] = 0;
20170 }
20171 }
cbe26ab8
DN
20172
20173 while (n_nops > 0)
c4ad648e
AM
20174 {
20175 nop = gen_nop ();
20176 emit_insn_before (nop, next_insn);
20177 if (can_issue_more == issue_rate - 1) /* new group begins */
20178 end = false;
20179 can_issue_more--;
20180 if (can_issue_more == 0)
20181 {
20182 can_issue_more = issue_rate - 1;
20183 (*group_count)++;
20184 end = true;
20185 for (i = 0; i < issue_rate; i++)
20186 {
20187 group_insns[i] = 0;
20188 }
20189 }
20190 n_nops--;
20191 }
cbe26ab8
DN
20192
20193 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 20194 can_issue_more++;
cbe26ab8 20195
c4ad648e
AM
20196 /* Is next_insn going to start a new group? */
20197 *group_end
20198 = (end
cbe26ab8
DN
20199 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
20200 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
20201 || (can_issue_more < issue_rate &&
c4ad648e 20202 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 20203 if (*group_end && end)
c4ad648e 20204 (*group_count)--;
cbe26ab8
DN
20205
20206 if (sched_verbose > 6)
c4ad648e
AM
20207 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
20208 *group_count, can_issue_more);
f676971a
EC
20209 return can_issue_more;
20210 }
cbe26ab8
DN
20211
20212 return can_issue_more;
20213}
20214
20215/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 20216 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
20217 form in practice. It tries to achieve this synchronization by forcing the
20218 estimated processor grouping on the compiler (as opposed to the function
20219 'pad_groups' which tries to force the scheduler's grouping on the processor).
20220
20221 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
20222 examines the (estimated) dispatch groups that will be formed by the processor
20223 dispatcher. It marks these group boundaries to reflect the estimated
20224 processor grouping, overriding the grouping that the scheduler had marked.
20225 Depending on the value of the flag '-minsert-sched-nops' this function can
20226 force certain insns into separate groups or force a certain distance between
20227 them by inserting nops, for example, if there exists a "costly dependence"
20228 between the insns.
20229
20230 The function estimates the group boundaries that the processor will form as
0fa2e4df 20231 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
20232 each insn. A subsequent insn will start a new group if one of the following
20233 4 cases applies:
20234 - no more vacant issue slots remain in the current dispatch group.
20235 - only the last issue slot, which is the branch slot, is vacant, but the next
20236 insn is not a branch.
20237 - only the last 2 or fewer issue slots, including the branch slot, are vacant,
20238 which means that a cracked insn (which occupies two issue slots) can't be
20239 issued in this group.
f676971a 20240 - fewer than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
20241 start a new group. */
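 /* A hypothetical illustration (assuming an issue rate of 4): after three
    simple insns only the branch slot is vacant, so a following add starts a
    new group (case 2) while a following branch would still fit; with only
    two slots left, one of them the branch slot, a cracked insn no longer
    fits and starts a new group (case 3).  */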
20242
20243static int
20244redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
20245{
20246 rtx insn, next_insn;
20247 int issue_rate;
20248 int can_issue_more;
20249 int slot, i;
20250 bool group_end;
20251 int group_count = 0;
20252 rtx *group_insns;
20253
20254 /* Initialize. */
20255 issue_rate = rs6000_issue_rate ();
5ead67f6 20256 group_insns = XALLOCAVEC (rtx, issue_rate);
f676971a 20257 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
20258 {
20259 group_insns[i] = 0;
20260 }
20261 can_issue_more = issue_rate;
20262 slot = 0;
20263 insn = get_next_active_insn (prev_head_insn, tail);
20264 group_end = false;
20265
20266 while (insn != NULL_RTX)
20267 {
20268 slot = (issue_rate - can_issue_more);
20269 group_insns[slot] = insn;
20270 can_issue_more =
c4ad648e 20271 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 20272 if (insn_terminates_group_p (insn, current_group))
c4ad648e 20273 can_issue_more = 0;
cbe26ab8
DN
20274
20275 next_insn = get_next_active_insn (insn, tail);
20276 if (next_insn == NULL_RTX)
c4ad648e 20277 return group_count + 1;
cbe26ab8 20278
c4ad648e
AM
20279 /* Is next_insn going to start a new group? */
20280 group_end
20281 = (can_issue_more == 0
20282 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
20283 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
20284 || (can_issue_more < issue_rate &&
20285 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 20286
f676971a 20287 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
20288 next_insn, &group_end, can_issue_more,
20289 &group_count);
cbe26ab8
DN
20290
20291 if (group_end)
c4ad648e
AM
20292 {
20293 group_count++;
20294 can_issue_more = 0;
20295 for (i = 0; i < issue_rate; i++)
20296 {
20297 group_insns[i] = 0;
20298 }
20299 }
cbe26ab8
DN
20300
20301 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 20302 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 20303 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 20304 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
20305
20306 insn = next_insn;
20307 if (can_issue_more == 0)
c4ad648e
AM
20308 can_issue_more = issue_rate;
20309 } /* while */
cbe26ab8
DN
20310
20311 return group_count;
20312}
20313
20314/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
20315 dispatch group boundaries that the scheduler had marked. Pad with nops
20316 any dispatch groups which have vacant issue slots, in order to force the
20317 scheduler's grouping on the processor dispatcher. The function
20318 returns the number of dispatch groups found. */
20319
20320static int
20321pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
20322{
20323 rtx insn, next_insn;
20324 rtx nop;
20325 int issue_rate;
20326 int can_issue_more;
20327 int group_end;
20328 int group_count = 0;
20329
20330 /* Initialize issue_rate. */
20331 issue_rate = rs6000_issue_rate ();
20332 can_issue_more = issue_rate;
20333
20334 insn = get_next_active_insn (prev_head_insn, tail);
20335 next_insn = get_next_active_insn (insn, tail);
20336
20337 while (insn != NULL_RTX)
20338 {
20339 can_issue_more =
20340 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
20341
20342 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
20343
20344 if (next_insn == NULL_RTX)
c4ad648e 20345 break;
cbe26ab8
DN
20346
20347 if (group_end)
c4ad648e
AM
20348 {
20349 /* If the scheduler had marked group termination at this location
e855c69d 20350 (between insn and next_insn), and neither insn nor next_insn will
c4ad648e
AM
20351 force group termination, pad the group with nops to force group
20352 termination. */
20353 if (can_issue_more
20354 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
20355 && !insn_terminates_group_p (insn, current_group)
20356 && !insn_terminates_group_p (next_insn, previous_group))
20357 {
9390387d 20358 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
20359 can_issue_more--;
20360
20361 while (can_issue_more)
20362 {
20363 nop = gen_nop ();
20364 emit_insn_before (nop, next_insn);
20365 can_issue_more--;
20366 }
20367 }
20368
20369 can_issue_more = issue_rate;
20370 group_count++;
20371 }
cbe26ab8
DN
20372
20373 insn = next_insn;
20374 next_insn = get_next_active_insn (insn, tail);
20375 }
20376
20377 return group_count;
20378}
20379
44cd321e
PS
20380/* We're beginning a new block. Initialize data structures as necessary. */
20381
20382static void
20383rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
20384 int sched_verbose ATTRIBUTE_UNUSED,
20385 int max_ready ATTRIBUTE_UNUSED)
982afe02 20386{
44cd321e
PS
20387 last_scheduled_insn = NULL_RTX;
20388 load_store_pendulum = 0;
20389}
20390
cbe26ab8
DN
20391/* The following function is called at the end of scheduling BB.
20392 After reload, it inserts nops to enforce insn group bundling. */
20393
20394static void
38f391a5 20395rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
20396{
20397 int n_groups;
20398
20399 if (sched_verbose)
20400 fprintf (dump, "=== Finishing schedule.\n");
20401
ec507f2d 20402 if (reload_completed && rs6000_sched_groups)
cbe26ab8 20403 {
e855c69d
AB
20404 /* Do not run sched_finish hook when selective scheduling enabled. */
20405 if (sel_sched_p ())
20406 return;
20407
cbe26ab8 20408 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 20409 return;
cbe26ab8
DN
20410
20411 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
20412 n_groups = pad_groups (dump, sched_verbose,
20413 current_sched_info->prev_head,
20414 current_sched_info->next_tail);
cbe26ab8 20415 else
c4ad648e
AM
20416 n_groups = redefine_groups (dump, sched_verbose,
20417 current_sched_info->prev_head,
20418 current_sched_info->next_tail);
cbe26ab8
DN
20419
20420 if (sched_verbose >= 6)
20421 {
20422 fprintf (dump, "ngroups = %d\n", n_groups);
20423 print_rtl (dump, current_sched_info->prev_head);
20424 fprintf (dump, "Done finish_sched\n");
20425 }
20426 }
20427}
e855c69d
AB
20428
20429struct _rs6000_sched_context
20430{
20431 short cached_can_issue_more;
20432 rtx last_scheduled_insn;
20433 int load_store_pendulum;
20434};
20435
20436typedef struct _rs6000_sched_context rs6000_sched_context_def;
20437typedef rs6000_sched_context_def *rs6000_sched_context_t;
20438
20439/* Allocate storage for a new scheduling context. */
20440static void *
20441rs6000_alloc_sched_context (void)
20442{
20443 return xmalloc (sizeof (rs6000_sched_context_def));
20444}
20445
20446/* If CLEAN_P is true, initialize _SC with clean data;
20447 otherwise initialize it from the global context. */
20448static void
20449rs6000_init_sched_context (void *_sc, bool clean_p)
20450{
20451 rs6000_sched_context_t sc = (rs6000_sched_context_t) _sc;
20452
20453 if (clean_p)
20454 {
20455 sc->cached_can_issue_more = 0;
20456 sc->last_scheduled_insn = NULL_RTX;
20457 sc->load_store_pendulum = 0;
20458 }
20459 else
20460 {
20461 sc->cached_can_issue_more = cached_can_issue_more;
20462 sc->last_scheduled_insn = last_scheduled_insn;
20463 sc->load_store_pendulum = load_store_pendulum;
20464 }
20465}
20466
20467/* Sets the global scheduling context to the one pointed to by _SC. */
20468static void
20469rs6000_set_sched_context (void *_sc)
20470{
20471 rs6000_sched_context_t sc = (rs6000_sched_context_t) _sc;
20472
20473 gcc_assert (sc != NULL);
20474
20475 cached_can_issue_more = sc->cached_can_issue_more;
20476 last_scheduled_insn = sc->last_scheduled_insn;
20477 load_store_pendulum = sc->load_store_pendulum;
20478}
20479
20480/* Free _SC. */
20481static void
20482rs6000_free_sched_context (void *_sc)
20483{
20484 gcc_assert (_sc != NULL);
20485
20486 free (_sc);
20487}
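/* Note: these four routines are expected to back the selective scheduler's
   TARGET_SCHED_ALLOC/INIT/SET/FREE_SCHED_CONTEXT hooks, so the Power6
   load/store pendulum state can be checkpointed and restored across
   scheduling contexts.  */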
20488
b6c9286a 20489\f
b6c9286a
MM
20490/* Length in units of the trampoline for entering a nested function. */
20491
20492int
863d938c 20493rs6000_trampoline_size (void)
b6c9286a
MM
20494{
20495 int ret = 0;
20496
20497 switch (DEFAULT_ABI)
20498 {
20499 default:
37409796 20500 gcc_unreachable ();
b6c9286a
MM
20501
20502 case ABI_AIX:
8f802bfb 20503 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
20504 break;
20505
4dabc42d 20506 case ABI_DARWIN:
b6c9286a 20507 case ABI_V4:
03a7e1a5 20508 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 20509 break;
b6c9286a
MM
20510 }
20511
20512 return ret;
20513}
20514
20515/* Emit RTL insns to initialize the variable parts of a trampoline.
20516 FNADDR is an RTX for the address of the function's pure code.
20517 CXT is an RTX for the static chain value for the function. */
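/* For illustration (AIX case below; regsize is 4 for -m32 and 8 for -m64),
   the initialized trampoline is a three-word function descriptor:

	addr + 0*regsize:  code address  (copied from FNADDR's descriptor)
	addr + 1*regsize:  TOC pointer   (copied from FNADDR's descriptor)
	addr + 2*regsize:  static chain  (CXT)

   whereas the V.4/eabi/Darwin case delegates to __trampoline_setup.  */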
20518
20519void
a2369ed3 20520rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 20521{
8bd04c56 20522 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 20523 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
20524
20525 switch (DEFAULT_ABI)
20526 {
20527 default:
37409796 20528 gcc_unreachable ();
b6c9286a 20529
8bd04c56 20530/* Macros to shorten the code expansions below. */
9613eaff 20531#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 20532#define MEM_PLUS(addr,offset) \
9613eaff 20533 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 20534
b6c9286a
MM
20535 /* Under AIX, just build the 3-word function descriptor. */
20536 case ABI_AIX:
8bd04c56 20537 {
9613eaff
SH
20538 rtx fn_reg = gen_reg_rtx (Pmode);
20539 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 20540 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 20541 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
20542 emit_move_insn (MEM_DEREF (addr), fn_reg);
20543 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
20544 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
20545 }
b6c9286a
MM
20546 break;
20547
4dabc42d
TC
20548 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
20549 case ABI_DARWIN:
b6c9286a 20550 case ABI_V4:
9613eaff 20551 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
bbbbb16a 20552 LCT_NORMAL, VOIDmode, 4,
9613eaff 20553 addr, Pmode,
eaf1bcf1 20554 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
20555 fnaddr, Pmode,
20556 ctx_reg, Pmode);
b6c9286a 20557 break;
b6c9286a
MM
20558 }
20559
20560 return;
20561}
7509c759
MM
20562
20563\f
91d231cb 20564/* Table of valid machine attributes. */
a4f6c312 20565
91d231cb 20566const struct attribute_spec rs6000_attribute_table[] =
7509c759 20567{
91d231cb 20568 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 20569 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
20570 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
20571 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
20572 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
20573 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
20574#ifdef SUBTARGET_ATTRIBUTE_TABLE
20575 SUBTARGET_ATTRIBUTE_TABLE,
20576#endif
a5c76ee6 20577 { NULL, 0, 0, false, false, false, NULL }
91d231cb 20578};
7509c759 20579
8bb418a3
ZL
20580/* Handle the "altivec" attribute. The attribute may have
20581 arguments as follows:
f676971a 20582
8bb418a3
ZL
20583 __attribute__((altivec(vector__)))
20584 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
20585 __attribute__((altivec(bool__))) (always followed by 'unsigned')
20586
20587 and may appear more than once (e.g., 'vector bool char') in a
20588 given declaration. */
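/* For example (illustrative only), the AltiVec declarations

     vector unsigned int vi;
     vector bool short vbs;

   reach this handler as "altivec" attributes on unsigned int and unsigned
   short, and are mapped to unsigned_V4SI_type_node and bool_V8HI_type_node
   respectively.  */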
20589
20590static tree
f90ac3f0
UP
20591rs6000_handle_altivec_attribute (tree *node,
20592 tree name ATTRIBUTE_UNUSED,
20593 tree args,
8bb418a3
ZL
20594 int flags ATTRIBUTE_UNUSED,
20595 bool *no_add_attrs)
20596{
20597 tree type = *node, result = NULL_TREE;
20598 enum machine_mode mode;
20599 int unsigned_p;
20600 char altivec_type
20601 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
20602 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
20603 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 20604 : '?');
8bb418a3
ZL
20605
20606 while (POINTER_TYPE_P (type)
20607 || TREE_CODE (type) == FUNCTION_TYPE
20608 || TREE_CODE (type) == METHOD_TYPE
20609 || TREE_CODE (type) == ARRAY_TYPE)
20610 type = TREE_TYPE (type);
20611
20612 mode = TYPE_MODE (type);
20613
f90ac3f0
UP
20614 /* Check for invalid AltiVec type qualifiers. */
20615 if (type == long_unsigned_type_node || type == long_integer_type_node)
20616 {
20617 if (TARGET_64BIT)
20618 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
20619 else if (rs6000_warn_altivec_long)
d4ee4d25 20620 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
20621 }
20622 else if (type == long_long_unsigned_type_node
20623 || type == long_long_integer_type_node)
20624 error ("use of %<long long%> in AltiVec types is invalid");
20625 else if (type == double_type_node)
20626 error ("use of %<double%> in AltiVec types is invalid");
20627 else if (type == long_double_type_node)
20628 error ("use of %<long double%> in AltiVec types is invalid");
20629 else if (type == boolean_type_node)
20630 error ("use of boolean types in AltiVec types is invalid");
20631 else if (TREE_CODE (type) == COMPLEX_TYPE)
20632 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
20633 else if (DECIMAL_FLOAT_MODE_P (mode))
20634 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
20635
20636 switch (altivec_type)
20637 {
20638 case 'v':
8df83eae 20639 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
20640 switch (mode)
20641 {
c4ad648e
AM
20642 case SImode:
20643 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
20644 break;
20645 case HImode:
20646 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
20647 break;
20648 case QImode:
20649 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
20650 break;
20651 case SFmode: result = V4SF_type_node; break;
20652 /* If the user says 'vector int bool', we may be handed the 'bool'
20653 attribute _before_ the 'vector' attribute, and so select the
20654 proper type in the 'b' case below. */
20655 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
20656 result = type;
20657 default: break;
8bb418a3
ZL
20658 }
20659 break;
20660 case 'b':
20661 switch (mode)
20662 {
c4ad648e
AM
20663 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
20664 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
20665 case QImode: case V16QImode: result = bool_V16QI_type_node;
20666 default: break;
8bb418a3
ZL
20667 }
20668 break;
20669 case 'p':
20670 switch (mode)
20671 {
c4ad648e
AM
20672 case V8HImode: result = pixel_V8HI_type_node;
20673 default: break;
8bb418a3
ZL
20674 }
20675 default: break;
20676 }
20677
4f538d42
UW
20678 /* Propagate qualifiers attached to the element type
20679 onto the vector type. */
20680 if (result && result != type && TYPE_QUALS (type))
20681 result = build_qualified_type (result, TYPE_QUALS (type));
7958a2a6 20682
8bb418a3
ZL
20683 *no_add_attrs = true; /* No need to hang on to the attribute. */
20684
f90ac3f0 20685 if (result)
5dc11954 20686 *node = lang_hooks.types.reconstruct_complex_type (*node, result);
8bb418a3
ZL
20687
20688 return NULL_TREE;
20689}
20690
f18eca82
ZL
20691/* AltiVec defines four built-in scalar types that serve as vector
20692 elements; we must teach the compiler how to mangle them. */
20693
20694static const char *
3101faab 20695rs6000_mangle_type (const_tree type)
f18eca82 20696{
608063c3
JB
20697 type = TYPE_MAIN_VARIANT (type);
20698
20699 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
20700 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
20701 return NULL;
20702
f18eca82
ZL
20703 if (type == bool_char_type_node) return "U6__boolc";
20704 if (type == bool_short_type_node) return "U6__bools";
20705 if (type == pixel_type_node) return "u7__pixel";
20706 if (type == bool_int_type_node) return "U6__booli";
20707
337bde91
DE
20708 /* Mangle IBM extended float long double as `g' (__float128) on
20709 powerpc*-linux where long-double-64 previously was the default. */
20710 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
20711 && TARGET_ELF
20712 && TARGET_LONG_DOUBLE_128
20713 && !TARGET_IEEEQUAD)
20714 return "g";
20715
f18eca82
ZL
20716 /* For all other types, use normal C++ mangling. */
20717 return NULL;
20718}
20719
a5c76ee6
ZW
20720/* Handle a "longcall" or "shortcall" attribute; arguments as in
20721 struct attribute_spec.handler. */
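/* Example usage (illustrative):

     void far_away (void) __attribute__ ((longcall));

   forces calls to far_away to go through a register, while "shortcall"
   marks a function as near enough to override -mlongcall for its calls.  */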
a4f6c312 20722
91d231cb 20723static tree
f676971a
EC
20724rs6000_handle_longcall_attribute (tree *node, tree name,
20725 tree args ATTRIBUTE_UNUSED,
20726 int flags ATTRIBUTE_UNUSED,
a2369ed3 20727 bool *no_add_attrs)
91d231cb
JM
20728{
20729 if (TREE_CODE (*node) != FUNCTION_TYPE
20730 && TREE_CODE (*node) != FIELD_DECL
20731 && TREE_CODE (*node) != TYPE_DECL)
20732 {
29d08eba
JM
20733 warning (OPT_Wattributes, "%qE attribute only applies to functions",
20734 name);
91d231cb
JM
20735 *no_add_attrs = true;
20736 }
6a4cee5f 20737
91d231cb 20738 return NULL_TREE;
7509c759
MM
20739}
20740
a5c76ee6
ZW
20741/* Set longcall attributes on all functions declared when
20742 rs6000_default_long_calls is true. */
20743static void
a2369ed3 20744rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
20745{
20746 if (rs6000_default_long_calls
20747 && (TREE_CODE (type) == FUNCTION_TYPE
20748 || TREE_CODE (type) == METHOD_TYPE))
20749 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
20750 NULL_TREE,
20751 TYPE_ATTRIBUTES (type));
16d6f994
EC
20752
20753#if TARGET_MACHO
20754 darwin_set_default_type_attributes (type);
20755#endif
a5c76ee6
ZW
20756}
20757
3cb999d8
DE
20758/* Return a reference suitable for calling a function with the
20759 longcall attribute. */
a4f6c312 20760
9390387d 20761rtx
a2369ed3 20762rs6000_longcall_ref (rtx call_ref)
6a4cee5f 20763{
d330fd93 20764 const char *call_name;
6a4cee5f
MM
20765 tree node;
20766
20767 if (GET_CODE (call_ref) != SYMBOL_REF)
20768 return call_ref;
20769
20770 /* System V adds '.' to the internal name, so skip any leading dots. */
20771 call_name = XSTR (call_ref, 0);
20772 if (*call_name == '.')
20773 {
20774 while (*call_name == '.')
20775 call_name++;
20776
20777 node = get_identifier (call_name);
39403d82 20778 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
20779 }
20780
20781 return force_reg (Pmode, call_ref);
20782}
7509c759 20783\f
77ccdfed
EC
20784#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
20785#define TARGET_USE_MS_BITFIELD_LAYOUT 0
20786#endif
20787
20788/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
20789 struct attribute_spec.handler. */
20790static tree
20791rs6000_handle_struct_attribute (tree *node, tree name,
20792 tree args ATTRIBUTE_UNUSED,
20793 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
20794{
20795 tree *type = NULL;
20796 if (DECL_P (*node))
20797 {
20798 if (TREE_CODE (*node) == TYPE_DECL)
20799 type = &TREE_TYPE (*node);
20800 }
20801 else
20802 type = node;
20803
20804 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
20805 || TREE_CODE (*type) == UNION_TYPE)))
20806 {
29d08eba 20807 warning (OPT_Wattributes, "%qE attribute ignored", name);
77ccdfed
EC
20808 *no_add_attrs = true;
20809 }
20810
20811 else if ((is_attribute_p ("ms_struct", name)
20812 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
20813 || ((is_attribute_p ("gcc_struct", name)
20814 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
20815 {
29d08eba
JM
20816 warning (OPT_Wattributes, "%qE incompatible attribute ignored",
20817 name);
77ccdfed
EC
20818 *no_add_attrs = true;
20819 }
20820
20821 return NULL_TREE;
20822}
20823
20824static bool
3101faab 20825rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
20826{
20827 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
20828 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
20829 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
20830}
20831\f
b64a1b53
RH
20832#ifdef USING_ELFOS_H
20833
d6b5193b 20834/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 20835
d6b5193b
RS
20836static void
20837rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20838{
20839 if (DEFAULT_ABI == ABI_AIX
20840 && TARGET_MINIMAL_TOC
20841 && !TARGET_RELOCATABLE)
20842 {
20843 if (!toc_initialized)
20844 {
20845 toc_initialized = 1;
20846 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20847 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
20848 fprintf (asm_out_file, "\t.tc ");
20849 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
20850 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20851 fprintf (asm_out_file, "\n");
20852
20853 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20854 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20855 fprintf (asm_out_file, " = .+32768\n");
20856 }
20857 else
20858 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20859 }
20860 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
20861 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20862 else
20863 {
20864 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20865 if (!toc_initialized)
20866 {
20867 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20868 fprintf (asm_out_file, " = .+32768\n");
20869 toc_initialized = 1;
20870 }
20871 }
20872}
20873
20874/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 20875
b64a1b53 20876static void
d6b5193b
RS
20877rs6000_elf_asm_init_sections (void)
20878{
20879 toc_section
20880 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
20881
20882 sdata2_section
20883 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
20884 SDATA2_SECTION_ASM_OP);
20885}
20886
20887/* Implement TARGET_SELECT_RTX_SECTION. */
20888
20889static section *
f676971a 20890rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 20891 unsigned HOST_WIDE_INT align)
7509c759 20892{
a9098fd0 20893 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20894 return toc_section;
7509c759 20895 else
d6b5193b 20896 return default_elf_select_rtx_section (mode, x, align);
7509c759 20897}
d9407988 20898\f
d1908feb
JJ
20899/* For a SYMBOL_REF, set generic flags and then perform some
20900 target-specific processing.
20901
d1908feb
JJ
20902 When the AIX ABI is requested on a non-AIX system, replace the
20903 function name with the real name (with a leading .) rather than the
20904 function descriptor name. This saves a lot of overriding code to
20905 read the prefixes. */
d9407988 20906
fb49053f 20907static void
a2369ed3 20908rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 20909{
d1908feb 20910 default_encode_section_info (decl, rtl, first);
b2003250 20911
d1908feb
JJ
20912 if (first
20913 && TREE_CODE (decl) == FUNCTION_DECL
20914 && !TARGET_AIX
20915 && DEFAULT_ABI == ABI_AIX)
d9407988 20916 {
c6a2438a 20917 rtx sym_ref = XEXP (rtl, 0);
d1908feb 20918 size_t len = strlen (XSTR (sym_ref, 0));
5ead67f6 20919 char *str = XALLOCAVEC (char, len + 2);
d1908feb
JJ
20920 str[0] = '.';
20921 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
20922 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 20923 }
d9407988
MM
20924}
20925
21d9bb3f 20926static inline bool
0a2aaacc 20927compare_section_name (const char *section, const char *templ)
21d9bb3f
PB
20928{
20929 int len;
20930
0a2aaacc
KG
20931 len = strlen (templ);
20932 return (strncmp (section, templ, len) == 0
21d9bb3f
PB
20933 && (section[len] == 0 || section[len] == '.'));
20934}
20935
c1b7d95a 20936bool
3101faab 20937rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
20938{
20939 if (rs6000_sdata == SDATA_NONE)
20940 return false;
20941
7482ad25
AF
20942 /* We want to merge strings, so we never consider them small data. */
20943 if (TREE_CODE (decl) == STRING_CST)
20944 return false;
20945
20946 /* Functions are never in the small data area. */
20947 if (TREE_CODE (decl) == FUNCTION_DECL)
20948 return false;
20949
0e5dbd9b
DE
20950 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
20951 {
20952 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
20953 if (compare_section_name (section, ".sdata")
20954 || compare_section_name (section, ".sdata2")
20955 || compare_section_name (section, ".gnu.linkonce.s")
20956 || compare_section_name (section, ".sbss")
20957 || compare_section_name (section, ".sbss2")
20958 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
20959 || strcmp (section, ".PPC.EMB.sdata0") == 0
20960 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
20961 return true;
20962 }
20963 else
20964 {
20965 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
20966
20967 if (size > 0
307b599c 20968 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
20969 /* If it's not public, and we're not going to reference it there,
20970 there's no need to put it in the small data section. */
0e5dbd9b
DE
20971 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
20972 return true;
20973 }
20974
20975 return false;
20976}
20977
b91da81f 20978#endif /* USING_ELFOS_H */
aacd3885
RS
20979\f
20980/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 20981
aacd3885 20982static bool
3101faab 20983rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
20984{
20985 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
20986}
a6c2a102 20987\f
000034eb 20988/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
20989 ADDR can be effectively incremented by incrementing REG.
20990
20991 r0 is special and we must not select it as an address
20992 register by this routine since our caller will try to
20993 increment the returned register via an "la" instruction. */
000034eb 20994
9390387d 20995rtx
a2369ed3 20996find_addr_reg (rtx addr)
000034eb
DE
20997{
20998 while (GET_CODE (addr) == PLUS)
20999 {
02441cd6
JL
21000 if (GET_CODE (XEXP (addr, 0)) == REG
21001 && REGNO (XEXP (addr, 0)) != 0)
000034eb 21002 addr = XEXP (addr, 0);
02441cd6
JL
21003 else if (GET_CODE (XEXP (addr, 1)) == REG
21004 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
21005 addr = XEXP (addr, 1);
21006 else if (CONSTANT_P (XEXP (addr, 0)))
21007 addr = XEXP (addr, 1);
21008 else if (CONSTANT_P (XEXP (addr, 1)))
21009 addr = XEXP (addr, 0);
21010 else
37409796 21011 gcc_unreachable ();
000034eb 21012 }
37409796
NS
21013 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
21014 return addr;
000034eb
DE
21015}
21016
a6c2a102 21017void
a2369ed3 21018rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
21019{
21020 fatal_insn ("bad address", op);
21021}
c8023011 21022
ee890fe2
SS
21023#if TARGET_MACHO
21024
efdba735 21025static tree branch_island_list = 0;
ee890fe2 21026
efdba735
SH
21027/* Remember to generate a branch island for far calls to the given
21028 function. */
ee890fe2 21029
f676971a 21030static void
c4ad648e
AM
21031add_compiler_branch_island (tree label_name, tree function_name,
21032 int line_number)
ee890fe2 21033{
efdba735 21034 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 21035 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
21036 TREE_CHAIN (branch_island) = branch_island_list;
21037 branch_island_list = branch_island;
ee890fe2
SS
21038}
21039
efdba735
SH
21040#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
21041#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
21042#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
21043 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 21044
efdba735
SH
21045/* Generate far-jump branch islands for everything on the
21046 branch_island_list. Invoked immediately after the last instruction
21047 of the epilogue has been emitted; the branch-islands must be
21048 appended to, and contiguous with, the function body. Mach-O stubs
21049 are generated in machopic_output_stub(). */
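/* For illustration (names hypothetical), the non-PIC island emitted for a
   far call to _foo recorded with label L42 looks like:

	L42:
	lis r12,hi16(_foo)
	ori r12,r12,lo16(_foo)
	mtctr r12
	bctr

   while the PIC variant materializes the address relative to a
   "bcl 20,31" base taken at L42_pic.  */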
ee890fe2 21050
efdba735
SH
21051static void
21052macho_branch_islands (void)
21053{
21054 char tmp_buf[512];
21055 tree branch_island;
21056
21057 for (branch_island = branch_island_list;
21058 branch_island;
21059 branch_island = TREE_CHAIN (branch_island))
21060 {
21061 const char *label =
21062 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
21063 const char *name =
11abc112 21064 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
21065 char name_buf[512];
21066 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
21067 if (name[0] == '*' || name[0] == '&')
21068 strcpy (name_buf, name+1);
21069 else
21070 {
21071 name_buf[0] = '_';
21072 strcpy (name_buf+1, name);
21073 }
21074 strcpy (tmp_buf, "\n");
21075 strcat (tmp_buf, label);
ee890fe2 21076#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 21077 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 21078 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 21079#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
21080 if (flag_pic)
21081 {
21082 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
21083 strcat (tmp_buf, label);
21084 strcat (tmp_buf, "_pic\n");
21085 strcat (tmp_buf, label);
21086 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 21087
efdba735
SH
21088 strcat (tmp_buf, "\taddis r11,r11,ha16(");
21089 strcat (tmp_buf, name_buf);
21090 strcat (tmp_buf, " - ");
21091 strcat (tmp_buf, label);
21092 strcat (tmp_buf, "_pic)\n");
f676971a 21093
efdba735 21094 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 21095
efdba735
SH
21096 strcat (tmp_buf, "\taddi r12,r11,lo16(");
21097 strcat (tmp_buf, name_buf);
21098 strcat (tmp_buf, " - ");
21099 strcat (tmp_buf, label);
21100 strcat (tmp_buf, "_pic)\n");
f676971a 21101
efdba735
SH
21102 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
21103 }
21104 else
21105 {
21106 strcat (tmp_buf, ":\nlis r12,hi16(");
21107 strcat (tmp_buf, name_buf);
21108 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
21109 strcat (tmp_buf, name_buf);
21110 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
21111 }
21112 output_asm_insn (tmp_buf, 0);
ee890fe2 21113#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 21114 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 21115 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 21116#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 21117 }
ee890fe2 21118
efdba735 21119 branch_island_list = 0;
ee890fe2
SS
21120}
21121
21122/* NO_PREVIOUS_DEF checks in the linked list whether the function name is
21123 already there or not. */
21124
efdba735 21125static int
a2369ed3 21126no_previous_def (tree function_name)
ee890fe2 21127{
efdba735
SH
21128 tree branch_island;
21129 for (branch_island = branch_island_list;
21130 branch_island;
21131 branch_island = TREE_CHAIN (branch_island))
21132 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
21133 return 0;
21134 return 1;
21135}
21136
21137/* GET_PREV_LABEL gets the label name from the previous definition of
21138 the function. */
21139
efdba735 21140static tree
a2369ed3 21141get_prev_label (tree function_name)
ee890fe2 21142{
efdba735
SH
21143 tree branch_island;
21144 for (branch_island = branch_island_list;
21145 branch_island;
21146 branch_island = TREE_CHAIN (branch_island))
21147 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
21148 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
21149 return 0;
21150}
21151
75b1b789
MS
21152#ifndef DARWIN_LINKER_GENERATES_ISLANDS
21153#define DARWIN_LINKER_GENERATES_ISLANDS 0
21154#endif
21155
21156/* KEXTs still need branch islands. */
21157#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
21158 || flag_mkernel || flag_apple_kext)
21159
ee890fe2 21160/* INSN is either a function call or a millicode call. It may have an
f676971a 21161 unconditional jump in its delay slot.
ee890fe2
SS
21162
21163 CALL_DEST is the routine we are calling. */
21164
21165char *
c4ad648e
AM
21166output_call (rtx insn, rtx *operands, int dest_operand_number,
21167 int cookie_operand_number)
ee890fe2
SS
21168{
21169 static char buf[256];
75b1b789
MS
21170 if (DARWIN_GENERATE_ISLANDS
21171 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 21172 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
21173 {
21174 tree labelname;
efdba735 21175 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 21176
ee890fe2
SS
21177 if (no_previous_def (funname))
21178 {
ee890fe2
SS
21179 rtx label_rtx = gen_label_rtx ();
21180 char *label_buf, temp_buf[256];
21181 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
21182 CODE_LABEL_NUMBER (label_rtx));
21183 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
21184 labelname = get_identifier (label_buf);
a38e7aa5 21185 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
21186 }
21187 else
21188 labelname = get_prev_label (funname);
21189
efdba735
SH
21190 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
21191 instruction will reach 'foo', otherwise link as 'bl L42'".
21192 "L42" should be a 'branch island', that will do a far jump to
21193 'foo'. Branch islands are generated in
21194 macho_branch_islands(). */
ee890fe2 21195 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 21196 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
21197 }
21198 else
efdba735
SH
21199 sprintf (buf, "bl %%z%d", dest_operand_number);
21200 return buf;
ee890fe2
SS
21201}
21202
ee890fe2
SS
21203/* Generate PIC and indirect symbol stubs. */
21204
21205void
a2369ed3 21206machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
21207{
21208 unsigned int length;
a4f6c312
SS
21209 char *symbol_name, *lazy_ptr_name;
21210 char *local_label_0;
ee890fe2
SS
21211 static int label = 0;
21212
df56a27f 21213 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 21214 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 21215
ee890fe2 21216
ee890fe2 21217 length = strlen (symb);
5ead67f6 21218 symbol_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
21219 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
21220
5ead67f6 21221 lazy_ptr_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
21222 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
21223
ee890fe2 21224 if (flag_pic == 2)
56c779bc 21225 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 21226 else
56c779bc 21227 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
21228
21229 if (flag_pic == 2)
21230 {
d974312d
DJ
21231 fprintf (file, "\t.align 5\n");
21232
21233 fprintf (file, "%s:\n", stub);
21234 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
21235
876455fa 21236 label++;
5ead67f6 21237 local_label_0 = XALLOCAVEC (char, sizeof ("\"L00000000000$spb\""));
876455fa 21238 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 21239
ee890fe2
SS
21240 fprintf (file, "\tmflr r0\n");
21241 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
21242 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
21243 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
21244 lazy_ptr_name, local_label_0);
21245 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
21246 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
21247 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
21248 lazy_ptr_name, local_label_0);
21249 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
21250 fprintf (file, "\tbctr\n");
21251 }
21252 else
d974312d
DJ
21253 {
21254 fprintf (file, "\t.align 4\n");
21255
21256 fprintf (file, "%s:\n", stub);
21257 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
21258
21259 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
21260 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
21261 (TARGET_64BIT ? "ldu" : "lwzu"),
21262 lazy_ptr_name);
d974312d
DJ
21263 fprintf (file, "\tmtctr r12\n");
21264 fprintf (file, "\tbctr\n");
21265 }
f676971a 21266
56c779bc 21267 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
21268 fprintf (file, "%s:\n", lazy_ptr_name);
21269 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
21270 fprintf (file, "%sdyld_stub_binding_helper\n",
21271 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
21272}
21273
21274/* Legitimize PIC addresses. If the address is already
21275 position-independent, we return ORIG. Newly generated
21276 position-independent addresses go into a reg. This is REG if non
21277 zero, otherwise we allocate register(s) as necessary. */
21278
4fbbe694 21279#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
21280
21281rtx
f676971a 21282rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 21283 rtx reg)
ee890fe2
SS
21284{
21285 rtx base, offset;
21286
21287 if (reg == NULL && ! reload_in_progress && ! reload_completed)
21288 reg = gen_reg_rtx (Pmode);
21289
21290 if (GET_CODE (orig) == CONST)
21291 {
37409796
NS
21292 rtx reg_temp;
21293
ee890fe2
SS
21294 if (GET_CODE (XEXP (orig, 0)) == PLUS
21295 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
21296 return orig;
21297
37409796 21298 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 21299
37409796
NS
21300 /* Use a different reg for the intermediate value, as
21301 it will be marked UNCHANGING. */
b3a13419 21302 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
21303 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
21304 Pmode, reg_temp);
21305 offset =
21306 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
21307 Pmode, reg);
bb8df8a6 21308
ee890fe2
SS
21309 if (GET_CODE (offset) == CONST_INT)
21310 {
21311 if (SMALL_INT (offset))
ed8908e7 21312 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
21313 else if (! reload_in_progress && ! reload_completed)
21314 offset = force_reg (Pmode, offset);
21315 else
c859cda6
DJ
21316 {
21317 rtx mem = force_const_mem (Pmode, orig);
21318 return machopic_legitimize_pic_address (mem, Pmode, reg);
21319 }
ee890fe2 21320 }
f1c25d3b 21321 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
21322 }
21323
21324 /* Fall back on generic machopic code. */
21325 return machopic_legitimize_pic_address (orig, mode, reg);
21326}
21327
c4e18b1c
GK
21328/* Output a .machine directive for the Darwin assembler, and call
21329 the generic start_file routine. */
21330
21331static void
21332rs6000_darwin_file_start (void)
21333{
94ff898d 21334 static const struct
c4e18b1c
GK
21335 {
21336 const char *arg;
21337 const char *name;
21338 int if_set;
21339 } mapping[] = {
55dbfb48 21340 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
21341 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
21342 { "power4", "ppc970", 0 },
21343 { "G5", "ppc970", 0 },
21344 { "7450", "ppc7450", 0 },
21345 { "7400", "ppc7400", MASK_ALTIVEC },
21346 { "G4", "ppc7400", 0 },
21347 { "750", "ppc750", 0 },
21348 { "740", "ppc750", 0 },
21349 { "G3", "ppc750", 0 },
21350 { "604e", "ppc604e", 0 },
21351 { "604", "ppc604", 0 },
21352 { "603e", "ppc603", 0 },
21353 { "603", "ppc603", 0 },
21354 { "601", "ppc601", 0 },
21355 { NULL, "ppc", 0 } };
21356 const char *cpu_id = "";
21357 size_t i;
94ff898d 21358
9390387d 21359 rs6000_file_start ();
192d0f89 21360 darwin_file_start ();
c4e18b1c
GK
21361
21362 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
21363 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
21364 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
21365 && rs6000_select[i].string[0] != '\0')
21366 cpu_id = rs6000_select[i].string;
21367
21368 /* Look through the mapping array. Pick the first name that either
21369 matches the argument, has a bit set in IF_SET that is also set
21370 in the target flags, or has a NULL name. */
21371
21372 i = 0;
21373 while (mapping[i].arg != NULL
21374 && strcmp (mapping[i].arg, cpu_id) != 0
21375 && (mapping[i].if_set & target_flags) == 0)
21376 i++;
21377
21378 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
21379}
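/* For example, -mcpu=G4 emits "\t.machine ppc7400" and -mcpu=power4 emits
   "\t.machine ppc970", while a CPU string that matches no entry and enables
   none of the listed mask bits falls through to the final "ppc" entry.  */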
21380
ee890fe2 21381#endif /* TARGET_MACHO */
7c262518
RH
21382
21383#if TARGET_ELF
9b580a0b
RH
21384static int
21385rs6000_elf_reloc_rw_mask (void)
7c262518 21386{
9b580a0b
RH
21387 if (flag_pic)
21388 return 3;
21389 else if (DEFAULT_ABI == ABI_AIX)
21390 return 2;
21391 else
21392 return 0;
7c262518 21393}
d9f6800d
RH
21394
21395/* Record an element in the table of global constructors. SYMBOL is
21396 a SYMBOL_REF of the function to be called; PRIORITY is a number
21397 between 0 and MAX_INIT_PRIORITY.
21398
21399 This differs from default_named_section_asm_out_constructor in
21400 that we have special handling for -mrelocatable. */
21401
21402static void
a2369ed3 21403rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
21404{
21405 const char *section = ".ctors";
21406 char buf[16];
21407
21408 if (priority != DEFAULT_INIT_PRIORITY)
21409 {
21410 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
21411 /* Invert the numbering so the linker puts us in the proper
21412 order; constructors are run from right to left, and the
21413 linker sorts in increasing order. */
21414 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21415 section = buf;
21416 }
21417
d6b5193b 21418 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21419 assemble_align (POINTER_SIZE);
d9f6800d
RH
21420
21421 if (TARGET_RELOCATABLE)
21422 {
21423 fputs ("\t.long (", asm_out_file);
21424 output_addr_const (asm_out_file, symbol);
21425 fputs (")@fixup\n", asm_out_file);
21426 }
21427 else
c8af3574 21428 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
21429}
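/* For example, assuming the usual MAX_INIT_PRIORITY of 65535, a constructor
   with priority 101 is placed in section ".ctors.65434", so the linker's
   increasing sort produces the required reverse execution order.  */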
21430
21431static void
a2369ed3 21432rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
21433{
21434 const char *section = ".dtors";
21435 char buf[16];
21436
21437 if (priority != DEFAULT_INIT_PRIORITY)
21438 {
21439 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
21440 /* Invert the numbering so the linker puts us in the proper
21441 order; constructors are run from right to left, and the
21442 linker sorts in increasing order. */
21443 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21444 section = buf;
21445 }
21446
d6b5193b 21447 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21448 assemble_align (POINTER_SIZE);
d9f6800d
RH
21449
21450 if (TARGET_RELOCATABLE)
21451 {
21452 fputs ("\t.long (", asm_out_file);
21453 output_addr_const (asm_out_file, symbol);
21454 fputs (")@fixup\n", asm_out_file);
21455 }
21456 else
c8af3574 21457 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 21458}
9739c90c
JJ
21459
21460void
a2369ed3 21461rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
21462{
21463 if (TARGET_64BIT)
21464 {
21465 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
21466 ASM_OUTPUT_LABEL (file, name);
21467 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
21468 rs6000_output_function_entry (file, name);
21469 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
21470 if (DOT_SYMBOLS)
9739c90c 21471 {
85b776df 21472 fputs ("\t.size\t", file);
9739c90c 21473 assemble_name (file, name);
85b776df
AM
21474 fputs (",24\n\t.type\t.", file);
21475 assemble_name (file, name);
21476 fputs (",@function\n", file);
21477 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
21478 {
21479 fputs ("\t.globl\t.", file);
21480 assemble_name (file, name);
21481 putc ('\n', file);
21482 }
9739c90c 21483 }
85b776df
AM
21484 else
21485 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 21486 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
21487 rs6000_output_function_entry (file, name);
21488 fputs (":\n", file);
9739c90c
JJ
21489 return;
21490 }
21491
21492 if (TARGET_RELOCATABLE
7f970b70 21493 && !TARGET_SECURE_PLT
e3b5732b 21494 && (get_pool_size () != 0 || crtl->profile)
3c9eb5f4 21495 && uses_TOC ())
9739c90c
JJ
21496 {
21497 char buf[256];
21498
21499 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
21500
21501 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
21502 fprintf (file, "\t.long ");
21503 assemble_name (file, buf);
21504 putc ('-', file);
21505 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
21506 assemble_name (file, buf);
21507 putc ('\n', file);
21508 }
21509
21510 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
21511 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
21512
21513 if (DEFAULT_ABI == ABI_AIX)
21514 {
21515 const char *desc_name, *orig_name;
21516
21517 orig_name = (*targetm.strip_name_encoding) (name);
21518 desc_name = orig_name;
21519 while (*desc_name == '.')
21520 desc_name++;
21521
21522 if (TREE_PUBLIC (decl))
21523 fprintf (file, "\t.globl %s\n", desc_name);
21524
21525 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
21526 fprintf (file, "%s:\n", desc_name);
21527 fprintf (file, "\t.long %s\n", orig_name);
21528 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
21529 if (DEFAULT_ABI == ABI_AIX)
21530 fputs ("\t.long 0\n", file);
21531 fprintf (file, "\t.previous\n");
21532 }
21533 ASM_OUTPUT_LABEL (file, name);
21534}
1334b570
AM
21535
21536static void
21537rs6000_elf_end_indicate_exec_stack (void)
21538{
21539 if (TARGET_32BIT)
21540 file_end_indicate_exec_stack ();
21541}
7c262518
RH
21542#endif
21543
cbaaba19 21544#if TARGET_XCOFF
0d5817b2
DE
21545static void
21546rs6000_xcoff_asm_output_anchor (rtx symbol)
21547{
21548 char buffer[100];
21549
21550 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
21551 SYMBOL_REF_BLOCK_OFFSET (symbol));
21552 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
21553}
21554
7c262518 21555static void
a2369ed3 21556rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
21557{
21558 fputs (GLOBAL_ASM_OP, stream);
21559 RS6000_OUTPUT_BASENAME (stream, name);
21560 putc ('\n', stream);
21561}
21562
d6b5193b
RS
21563/* A get_unnamed_decl callback, used for read-only sections. PTR
21564 points to the section string variable. */
21565
21566static void
21567rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
21568{
890f9edf
OH
21569 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
21570 *(const char *const *) directive,
21571 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21572}
21573
21574/* Likewise for read-write sections. */
21575
21576static void
21577rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
21578{
890f9edf
OH
21579 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
21580 *(const char *const *) directive,
21581 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21582}
21583
21584/* A get_unnamed_section callback, used for switching to toc_section. */
21585
21586static void
21587rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
21588{
21589 if (TARGET_MINIMAL_TOC)
21590 {
21591 /* toc_section is always selected at least once from
21592 rs6000_xcoff_file_start, so this is guaranteed to
21593 always be defined once and only once in each file. */
21594 if (!toc_initialized)
21595 {
21596 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
21597 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
21598 toc_initialized = 1;
21599 }
21600 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
21601 (TARGET_32BIT ? "" : ",3"));
21602 }
21603 else
21604 fputs ("\t.toc\n", asm_out_file);
21605}
21606
21607/* Implement TARGET_ASM_INIT_SECTIONS. */
21608
21609static void
21610rs6000_xcoff_asm_init_sections (void)
21611{
21612 read_only_data_section
21613 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21614 &xcoff_read_only_section_name);
21615
21616 private_data_section
21617 = get_unnamed_section (SECTION_WRITE,
21618 rs6000_xcoff_output_readwrite_section_asm_op,
21619 &xcoff_private_data_section_name);
21620
21621 read_only_private_data_section
21622 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21623 &xcoff_private_data_section_name);
21624
21625 toc_section
21626 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
21627
21628 readonly_data_section = read_only_data_section;
21629 exception_section = data_section;
21630}
21631
9b580a0b
RH
21632static int
21633rs6000_xcoff_reloc_rw_mask (void)
21634{
21635 return 3;
21636}
21637
b275d088 21638static void
c18a5b6c
MM
21639rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
21640 tree decl ATTRIBUTE_UNUSED)
7c262518 21641{
0e5dbd9b
DE
21642 int smclass;
21643 static const char * const suffix[3] = { "PR", "RO", "RW" };
21644
21645 if (flags & SECTION_CODE)
21646 smclass = 0;
21647 else if (flags & SECTION_WRITE)
21648 smclass = 2;
21649 else
21650 smclass = 1;
21651
5b5198f7 21652 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 21653 (flags & SECTION_CODE) ? "." : "",
5b5198f7 21654 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 21655}
ae46c4e0 21656
d6b5193b 21657static section *
f676971a 21658rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 21659 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 21660{
9b580a0b 21661 if (decl_readonly_section (decl, reloc))
ae46c4e0 21662 {
0e5dbd9b 21663 if (TREE_PUBLIC (decl))
d6b5193b 21664 return read_only_data_section;
ae46c4e0 21665 else
d6b5193b 21666 return read_only_private_data_section;
ae46c4e0
RH
21667 }
21668 else
21669 {
0e5dbd9b 21670 if (TREE_PUBLIC (decl))
d6b5193b 21671 return data_section;
ae46c4e0 21672 else
d6b5193b 21673 return private_data_section;
ae46c4e0
RH
21674 }
21675}
21676
21677static void
a2369ed3 21678rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
21679{
21680 const char *name;
ae46c4e0 21681
5b5198f7
DE
21682 /* Use select_section for private and uninitialized data. */
21683 if (!TREE_PUBLIC (decl)
21684 || DECL_COMMON (decl)
0e5dbd9b
DE
21685 || DECL_INITIAL (decl) == NULL_TREE
21686 || DECL_INITIAL (decl) == error_mark_node
21687 || (flag_zero_initialized_in_bss
21688 && initializer_zerop (DECL_INITIAL (decl))))
21689 return;
21690
21691 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21692 name = (*targetm.strip_name_encoding) (name);
21693 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 21694}
b64a1b53 21695
fb49053f
RH
21696/* Select section for constant in constant pool.
21697
21698 On RS/6000, all constants are in the private read-only data area.
21699 However, if this is being placed in the TOC it must be output as a
21700 toc entry. */
21701
d6b5193b 21702static section *
f676971a 21703rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 21704 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
21705{
21706 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 21707 return toc_section;
b64a1b53 21708 else
d6b5193b 21709 return read_only_private_data_section;
b64a1b53 21710}
772c5265
RH
21711
21712/* Remove any trailing [DS] or the like from the symbol name. */
21713
21714static const char *
a2369ed3 21715rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
21716{
21717 size_t len;
21718 if (*name == '*')
21719 name++;
21720 len = strlen (name);
21721 if (name[len - 1] == ']')
21722 return ggc_alloc_string (name, len - 4);
21723 else
21724 return name;
21725}
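/* For example, "*foo[DS]" and "foo[DS]" both come back as "foo", while a
   name with no bracketed suffix is returned unchanged.  Note that the code
   assumes the bracketed suffix is exactly four characters long.  */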
21726
5add3202
DE
21727/* Section attributes. AIX is always PIC. */
21728
21729static unsigned int
a2369ed3 21730rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 21731{
5b5198f7 21732 unsigned int align;
9b580a0b 21733 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
21734
21735 /* Align to at least UNIT size. */
21736 if (flags & SECTION_CODE)
21737 align = MIN_UNITS_PER_WORD;
21738 else
21739 /* Increase alignment of large objects if not already stricter. */
21740 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
21741 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
21742 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
21743
21744 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 21745}
a5fe455b 21746
1bc7c5b6
ZW
21747/* Output at beginning of assembler file.
21748
21749 Initialize the section names for the RS/6000 at this point.
21750
21751 Specify filename, including full path, to assembler.
21752
21753 We want to go into the TOC section so at least one .toc will be emitted.
21754 Also, in order to output proper .bs/.es pairs, we need at least one static
21755 [RW] section emitted.
21756
21757 Finally, declare mcount when profiling to make the assembler happy. */
21758
21759static void
863d938c 21760rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
21761{
21762 rs6000_gen_section_name (&xcoff_bss_section_name,
21763 main_input_filename, ".bss_");
21764 rs6000_gen_section_name (&xcoff_private_data_section_name,
21765 main_input_filename, ".rw_");
21766 rs6000_gen_section_name (&xcoff_read_only_section_name,
21767 main_input_filename, ".ro_");
21768
21769 fputs ("\t.file\t", asm_out_file);
21770 output_quoted_string (asm_out_file, main_input_filename);
21771 fputc ('\n', asm_out_file);
1bc7c5b6 21772 if (write_symbols != NO_DEBUG)
d6b5193b
RS
21773 switch_to_section (private_data_section);
21774 switch_to_section (text_section);
1bc7c5b6
ZW
21775 if (profile_flag)
21776 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
21777 rs6000_file_start ();
21778}
21779
a5fe455b
ZW
21780/* Output at end of assembler file.
21781 On the RS/6000, referencing data should automatically pull in text. */
21782
21783static void
863d938c 21784rs6000_xcoff_file_end (void)
a5fe455b 21785{
d6b5193b 21786 switch_to_section (text_section);
a5fe455b 21787 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 21788 switch_to_section (data_section);
a5fe455b
ZW
21789 fputs (TARGET_32BIT
21790 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
21791 asm_out_file);
21792}
f1384257 21793#endif /* TARGET_XCOFF */
0e5dbd9b 21794
3c50106f
RH
21795/* Compute a (partial) cost for rtx X. Return true if the complete
21796 cost has been computed, and false if subexpressions should be
21797 scanned. In either case, *TOTAL contains the cost result. */
21798
21799static bool
f40751dd
JH
21800rs6000_rtx_costs (rtx x, int code, int outer_code, int *total,
21801 bool speed)
3c50106f 21802{
f0517163
RS
21803 enum machine_mode mode = GET_MODE (x);
21804
3c50106f
RH
21805 switch (code)
21806 {
30a555d9 21807 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 21808 case CONST_INT:
066cd967
DE
21809 if (((outer_code == SET
21810 || outer_code == PLUS
21811 || outer_code == MINUS)
279bb624
DE
21812 && (satisfies_constraint_I (x)
21813 || satisfies_constraint_L (x)))
066cd967 21814 || (outer_code == AND
279bb624
DE
21815 && (satisfies_constraint_K (x)
21816 || (mode == SImode
21817 ? satisfies_constraint_L (x)
21818 : satisfies_constraint_J (x))
1990cd79
AM
21819 || mask_operand (x, mode)
21820 || (mode == DImode
21821 && mask64_operand (x, DImode))))
22e54023 21822 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
21823 && (satisfies_constraint_K (x)
21824 || (mode == SImode
21825 ? satisfies_constraint_L (x)
21826 : satisfies_constraint_J (x))))
066cd967
DE
21827 || outer_code == ASHIFT
21828 || outer_code == ASHIFTRT
21829 || outer_code == LSHIFTRT
21830 || outer_code == ROTATE
21831 || outer_code == ROTATERT
d5861a7a 21832 || outer_code == ZERO_EXTRACT
066cd967 21833 || (outer_code == MULT
279bb624 21834 && satisfies_constraint_I (x))
22e54023
DE
21835 || ((outer_code == DIV || outer_code == UDIV
21836 || outer_code == MOD || outer_code == UMOD)
21837 && exact_log2 (INTVAL (x)) >= 0)
066cd967 21838 || (outer_code == COMPARE
279bb624
DE
21839 && (satisfies_constraint_I (x)
21840 || satisfies_constraint_K (x)))
22e54023 21841 || (outer_code == EQ
279bb624
DE
21842 && (satisfies_constraint_I (x)
21843 || satisfies_constraint_K (x)
21844 || (mode == SImode
21845 ? satisfies_constraint_L (x)
21846 : satisfies_constraint_J (x))))
22e54023 21847 || (outer_code == GTU
279bb624 21848 && satisfies_constraint_I (x))
22e54023 21849 || (outer_code == LTU
279bb624 21850 && satisfies_constraint_P (x)))
066cd967
DE
21851 {
21852 *total = 0;
21853 return true;
21854 }
21855 else if ((outer_code == PLUS
4ae234b0 21856 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 21857 || (outer_code == MINUS
4ae234b0 21858 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
21859 || ((outer_code == SET
21860 || outer_code == IOR
21861 || outer_code == XOR)
21862 && (INTVAL (x)
21863 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
21864 {
21865 *total = COSTS_N_INSNS (1);
21866 return true;
21867 }
21868 /* FALLTHRU */
21869
21870 case CONST_DOUBLE:
f6fe3a22 21871 if (mode == DImode && code == CONST_DOUBLE)
066cd967 21872 {
f6fe3a22
DE
21873 if ((outer_code == IOR || outer_code == XOR)
21874 && CONST_DOUBLE_HIGH (x) == 0
21875 && (CONST_DOUBLE_LOW (x)
21876 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
21877 {
21878 *total = 0;
21879 return true;
21880 }
21881 else if ((outer_code == AND && and64_2_operand (x, DImode))
21882 || ((outer_code == SET
21883 || outer_code == IOR
21884 || outer_code == XOR)
21885 && CONST_DOUBLE_HIGH (x) == 0))
21886 {
21887 *total = COSTS_N_INSNS (1);
21888 return true;
21889 }
066cd967
DE
21890 }
21891 /* FALLTHRU */
21892
3c50106f 21893 case CONST:
066cd967 21894 case HIGH:
3c50106f 21895 case SYMBOL_REF:
066cd967
DE
21896 case MEM:
21897 /* When optimizing for size, MEM should be slightly more expensive
21898 than generating an address, e.g., (plus (reg) (const)).
c112cf2b 21899 L1 cache latency is about two instructions. */
f40751dd 21900 *total = !speed ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
21901 return true;
21902
30a555d9
DE
21903 case LABEL_REF:
21904 *total = 0;
21905 return true;
21906
3c50106f 21907 case PLUS:
f0517163 21908 if (mode == DFmode)
066cd967
DE
21909 {
21910 if (GET_CODE (XEXP (x, 0)) == MULT)
21911 {
21912 /* FNMA accounted in outer NEG. */
21913 if (outer_code == NEG)
21914 *total = rs6000_cost->dmul - rs6000_cost->fp;
21915 else
21916 *total = rs6000_cost->dmul;
21917 }
21918 else
21919 *total = rs6000_cost->fp;
21920 }
f0517163 21921 else if (mode == SFmode)
066cd967
DE
21922 {
21923 /* FNMA accounted in outer NEG. */
21924 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21925 *total = 0;
21926 else
21927 *total = rs6000_cost->fp;
21928 }
f0517163 21929 else
066cd967
DE
21930 *total = COSTS_N_INSNS (1);
21931 return false;
3c50106f 21932
52190329 21933 case MINUS:
f0517163 21934 if (mode == DFmode)
066cd967 21935 {
762c919f
JM
21936 if (GET_CODE (XEXP (x, 0)) == MULT
21937 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
21938 {
21939 /* FNMA accounted in outer NEG. */
21940 if (outer_code == NEG)
762c919f 21941 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
21942 else
21943 *total = rs6000_cost->dmul;
21944 }
21945 else
21946 *total = rs6000_cost->fp;
21947 }
f0517163 21948 else if (mode == SFmode)
066cd967
DE
21949 {
21950 /* FNMA accounted in outer NEG. */
21951 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21952 *total = 0;
21953 else
21954 *total = rs6000_cost->fp;
21955 }
f0517163 21956 else
c4ad648e 21957 *total = COSTS_N_INSNS (1);
066cd967 21958 return false;
3c50106f
RH
21959
21960 case MULT:
c9dbf840 21961 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 21962 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 21963 {
8b897cfa
RS
21964 if (INTVAL (XEXP (x, 1)) >= -256
21965 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 21966 *total = rs6000_cost->mulsi_const9;
8b897cfa 21967 else
06a67bdd 21968 *total = rs6000_cost->mulsi_const;
3c50106f 21969 }
066cd967
DE
21970 /* FMA accounted in outer PLUS/MINUS. */
21971 else if ((mode == DFmode || mode == SFmode)
21972 && (outer_code == PLUS || outer_code == MINUS))
21973 *total = 0;
f0517163 21974 else if (mode == DFmode)
06a67bdd 21975 *total = rs6000_cost->dmul;
f0517163 21976 else if (mode == SFmode)
06a67bdd 21977 *total = rs6000_cost->fp;
f0517163 21978 else if (mode == DImode)
06a67bdd 21979 *total = rs6000_cost->muldi;
8b897cfa 21980 else
06a67bdd 21981 *total = rs6000_cost->mulsi;
066cd967 21982 return false;
3c50106f
RH
21983
21984 case DIV:
21985 case MOD:
f0517163
RS
21986 if (FLOAT_MODE_P (mode))
21987 {
06a67bdd
RS
21988 *total = mode == DFmode ? rs6000_cost->ddiv
21989 : rs6000_cost->sdiv;
066cd967 21990 return false;
f0517163 21991 }
5efb1046 21992 /* FALLTHRU */
3c50106f
RH
21993
21994 case UDIV:
21995 case UMOD:
627b6fe2
DJ
21996 if (GET_CODE (XEXP (x, 1)) == CONST_INT
21997 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
21998 {
21999 if (code == DIV || code == MOD)
22000 /* Shift, addze */
22001 *total = COSTS_N_INSNS (2);
22002 else
22003 /* Shift */
22004 *total = COSTS_N_INSNS (1);
22005 }
c4ad648e 22006 else
627b6fe2
DJ
22007 {
22008 if (GET_MODE (XEXP (x, 1)) == DImode)
22009 *total = rs6000_cost->divdi;
22010 else
22011 *total = rs6000_cost->divsi;
22012 }
22013 /* Add in shift and subtract for MOD. */
22014 if (code == MOD || code == UMOD)
22015 *total += COSTS_N_INSNS (2);
066cd967 22016 return false;
3c50106f 22017
32f56aad 22018 case CTZ:
3c50106f
RH
22019 case FFS:
22020 *total = COSTS_N_INSNS (4);
066cd967 22021 return false;
3c50106f 22022
32f56aad
DE
22023 case POPCOUNT:
22024 *total = COSTS_N_INSNS (6);
22025 return false;
22026
06a67bdd 22027 case NOT:
066cd967
DE
22028 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
22029 {
22030 *total = 0;
22031 return false;
22032 }
22033 /* FALLTHRU */
22034
22035 case AND:
32f56aad 22036 case CLZ:
066cd967
DE
22037 case IOR:
22038 case XOR:
d5861a7a
DE
22039 case ZERO_EXTRACT:
22040 *total = COSTS_N_INSNS (1);
22041 return false;
22042
066cd967
DE
22043 case ASHIFT:
22044 case ASHIFTRT:
22045 case LSHIFTRT:
22046 case ROTATE:
22047 case ROTATERT:
d5861a7a 22048 /* Handle mul_highpart. */
066cd967
DE
22049 if (outer_code == TRUNCATE
22050 && GET_CODE (XEXP (x, 0)) == MULT)
22051 {
22052 if (mode == DImode)
22053 *total = rs6000_cost->muldi;
22054 else
22055 *total = rs6000_cost->mulsi;
22056 return true;
22057 }
d5861a7a
DE
22058 else if (outer_code == AND)
22059 *total = 0;
22060 else
22061 *total = COSTS_N_INSNS (1);
22062 return false;
22063
22064 case SIGN_EXTEND:
22065 case ZERO_EXTEND:
22066 if (GET_CODE (XEXP (x, 0)) == MEM)
22067 *total = 0;
22068 else
22069 *total = COSTS_N_INSNS (1);
066cd967 22070 return false;
06a67bdd 22071
066cd967
DE
22072 case COMPARE:
22073 case NEG:
22074 case ABS:
22075 if (!FLOAT_MODE_P (mode))
22076 {
22077 *total = COSTS_N_INSNS (1);
22078 return false;
22079 }
22080 /* FALLTHRU */
22081
22082 case FLOAT:
22083 case UNSIGNED_FLOAT:
22084 case FIX:
22085 case UNSIGNED_FIX:
06a67bdd
RS
22086 case FLOAT_TRUNCATE:
22087 *total = rs6000_cost->fp;
066cd967 22088 return false;
06a67bdd 22089
a2af5043
DJ
22090 case FLOAT_EXTEND:
22091 if (mode == DFmode)
22092 *total = 0;
22093 else
22094 *total = rs6000_cost->fp;
22095 return false;
22096
06a67bdd
RS
22097 case UNSPEC:
22098 switch (XINT (x, 1))
22099 {
22100 case UNSPEC_FRSP:
22101 *total = rs6000_cost->fp;
22102 return true;
22103
22104 default:
22105 break;
22106 }
22107 break;
22108
22109 case CALL:
22110 case IF_THEN_ELSE:
f40751dd 22111 if (!speed)
06a67bdd
RS
22112 {
22113 *total = COSTS_N_INSNS (1);
22114 return true;
22115 }
066cd967
DE
22116 else if (FLOAT_MODE_P (mode)
22117 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
22118 {
22119 *total = rs6000_cost->fp;
22120 return false;
22121 }
06a67bdd
RS
22122 break;
22123
c0600ecd
DE
22124 case EQ:
22125 case GTU:
22126 case LTU:
22e54023
DE
22127 /* Carry bit requires mode == Pmode.
22128 NEG or PLUS already counted so only add one. */
22129 if (mode == Pmode
22130 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 22131 {
22e54023
DE
22132 *total = COSTS_N_INSNS (1);
22133 return true;
22134 }
22135 if (outer_code == SET)
22136 {
22137 if (XEXP (x, 1) == const0_rtx)
c0600ecd 22138 {
22e54023 22139 *total = COSTS_N_INSNS (2);
c0600ecd 22140 return true;
c0600ecd 22141 }
22e54023
DE
22142 else if (mode == Pmode)
22143 {
22144 *total = COSTS_N_INSNS (3);
22145 return false;
22146 }
22147 }
22148 /* FALLTHRU */
22149
22150 case GT:
22151 case LT:
22152 case UNORDERED:
22153 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
22154 {
22155 *total = COSTS_N_INSNS (2);
22156 return true;
c0600ecd 22157 }
22e54023
DE
22158 /* CC COMPARE. */
22159 if (outer_code == COMPARE)
22160 {
22161 *total = 0;
22162 return true;
22163 }
22164 break;
c0600ecd 22165
3c50106f 22166 default:
06a67bdd 22167 break;
3c50106f 22168 }
06a67bdd
RS
22169
22170 return false;
3c50106f
RH
22171}
22172
34bb030a
DE
22173/* A C expression returning the cost of moving data from a register of class
22174 CLASS1 to one of CLASS2. */
22175
22176int
f676971a 22177rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 22178 enum reg_class from, enum reg_class to)
34bb030a
DE
22179{
22180 /* Moves from/to GENERAL_REGS. */
22181 if (reg_classes_intersect_p (to, GENERAL_REGS)
22182 || reg_classes_intersect_p (from, GENERAL_REGS))
22183 {
22184 if (! reg_classes_intersect_p (to, GENERAL_REGS))
22185 from = to;
22186
22187 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
22188 return (rs6000_memory_move_cost (mode, from, 0)
22189 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
22190
c4ad648e
AM
22191 /* It's more expensive to move CR_REGS than CR0_REGS because of the
22192 shift. */
34bb030a
DE
22193 else if (from == CR_REGS)
22194 return 4;
22195
aafc759a
PH
22196 /* Power6 has slower LR/CTR moves so make them more expensive than
22197 memory in order to bias spills to memory.  */
22198 else if (rs6000_cpu == PROCESSOR_POWER6
22199 && reg_classes_intersect_p (from, LINK_OR_CTR_REGS))
22200 return 6 * hard_regno_nregs[0][mode];
22201
34bb030a 22202 else
c4ad648e 22203 /* A move will cost one instruction per GPR moved. */
c8b622ff 22204 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
22205 }
22206
c4ad648e 22207 /* Moving between two similar registers is just one instruction. */
34bb030a 22208 else if (reg_classes_intersect_p (to, from))
7393f7f8 22209 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 22210
c4ad648e 22211 /* Everything else has to go through GENERAL_REGS. */
34bb030a 22212 else
f676971a 22213 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
22214 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
22215}
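/* For example, moving a DImode value between GPRs on a 32-bit target
   occupies two registers, so the GENERAL_REGS branch above returns
   2 * 2 = 4, while a CR <-> GPR move returns a flat 4 to account for the
   move-from-CR plus shift sequence.  */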
22216
22217/* A C expression returning the cost of moving data of MODE from a register to
22218 or from memory. */
22219
22220int
0a2aaacc 22221rs6000_memory_move_cost (enum machine_mode mode, enum reg_class rclass,
a2369ed3 22222 int in ATTRIBUTE_UNUSED)
34bb030a 22223{
0a2aaacc 22224 if (reg_classes_intersect_p (rclass, GENERAL_REGS))
c8b622ff 22225 return 4 * hard_regno_nregs[0][mode];
0a2aaacc 22226 else if (reg_classes_intersect_p (rclass, FLOAT_REGS))
c8b622ff 22227 return 4 * hard_regno_nregs[32][mode];
0a2aaacc 22228 else if (reg_classes_intersect_p (rclass, ALTIVEC_REGS))
c8b622ff 22229 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a 22230 else
0a2aaacc 22231 return 4 + rs6000_register_move_cost (mode, rclass, GENERAL_REGS);
34bb030a
DE
22232}
22233
9c78b944
DE
22234/* Returns a code for a target-specific builtin that implements
22235 the reciprocal of the function, or NULL_TREE if not available. */
22236
22237static tree
22238rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
22239 bool sqrt ATTRIBUTE_UNUSED)
22240{
22241 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
22242 && flag_finite_math_only && !flag_trapping_math
22243 && flag_unsafe_math_optimizations))
22244 return NULL_TREE;
22245
22246 if (md_fn)
22247 return NULL_TREE;
22248 else
22249 switch (fn)
22250 {
22251 case BUILT_IN_SQRTF:
22252 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
22253
22254 default:
22255 return NULL_TREE;
22256 }
22257}
22258
ef765ea9
DE
22259/* Newton-Raphson approximation of single-precision floating point divide n/d.
22260 Assumes no trapping math and finite arguments. */
22261
22262void
9c78b944 22263rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
ef765ea9
DE
22264{
22265 rtx x0, e0, e1, y1, u0, v0, one;
22266
22267 x0 = gen_reg_rtx (SFmode);
22268 e0 = gen_reg_rtx (SFmode);
22269 e1 = gen_reg_rtx (SFmode);
22270 y1 = gen_reg_rtx (SFmode);
22271 u0 = gen_reg_rtx (SFmode);
22272 v0 = gen_reg_rtx (SFmode);
22273 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
22274
22275 /* x0 = 1./d estimate */
22276 emit_insn (gen_rtx_SET (VOIDmode, x0,
22277 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
22278 UNSPEC_FRES)));
22279 /* e0 = 1. - d * x0 */
22280 emit_insn (gen_rtx_SET (VOIDmode, e0,
22281 gen_rtx_MINUS (SFmode, one,
22282 gen_rtx_MULT (SFmode, d, x0))));
22283 /* e1 = e0 + e0 * e0 */
22284 emit_insn (gen_rtx_SET (VOIDmode, e1,
22285 gen_rtx_PLUS (SFmode,
22286 gen_rtx_MULT (SFmode, e0, e0), e0)));
22287 /* y1 = x0 + e1 * x0 */
22288 emit_insn (gen_rtx_SET (VOIDmode, y1,
22289 gen_rtx_PLUS (SFmode,
22290 gen_rtx_MULT (SFmode, e1, x0), x0)));
22291 /* u0 = n * y1 */
22292 emit_insn (gen_rtx_SET (VOIDmode, u0,
22293 gen_rtx_MULT (SFmode, n, y1)));
22294 /* v0 = n - d * u0 */
22295 emit_insn (gen_rtx_SET (VOIDmode, v0,
22296 gen_rtx_MINUS (SFmode, n,
22297 gen_rtx_MULT (SFmode, d, u0))));
9c78b944
DE
22298 /* dst = u0 + v0 * y1 */
22299 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
22300 gen_rtx_PLUS (SFmode,
22301 gen_rtx_MULT (SFmode, v0, y1), u0)));
22302}
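
/* Illustration only, not part of the compiler: a plain C model of the
   refinement sequence emitted above, with an ordinary division standing in
   for the fres reciprocal estimate.  The helper name is invented for this
   sketch.  The double-precision variant below follows the same pattern with
   two extra refinement steps.  */

static float
rs6000_swdivsf_model (float n, float d)
{
  float x0 = 1.0f / d;          /* stand-in for the fres estimate */
  float e0 = 1.0f - d * x0;     /* error of the estimate */
  float e1 = e0 + e0 * e0;      /* one refinement of the error term */
  float y1 = x0 + e1 * x0;      /* refined reciprocal */
  float u0 = n * y1;            /* first quotient estimate */
  float v0 = n - d * u0;        /* remaining residual */
  return u0 + v0 * y1;          /* corrected quotient */
}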
22303
22304/* Newton-Raphson approximation of double-precision floating point divide n/d.
22305 Assumes no trapping math and finite arguments. */
22306
22307void
9c78b944 22308rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
ef765ea9
DE
22309{
22310 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
22311
22312 x0 = gen_reg_rtx (DFmode);
22313 e0 = gen_reg_rtx (DFmode);
22314 e1 = gen_reg_rtx (DFmode);
22315 e2 = gen_reg_rtx (DFmode);
22316 y1 = gen_reg_rtx (DFmode);
22317 y2 = gen_reg_rtx (DFmode);
22318 y3 = gen_reg_rtx (DFmode);
22319 u0 = gen_reg_rtx (DFmode);
22320 v0 = gen_reg_rtx (DFmode);
22321 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
22322
22323 /* x0 = 1./d estimate */
22324 emit_insn (gen_rtx_SET (VOIDmode, x0,
22325 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
22326 UNSPEC_FRES)));
22327 /* e0 = 1. - d * x0 */
22328 emit_insn (gen_rtx_SET (VOIDmode, e0,
22329 gen_rtx_MINUS (DFmode, one,
22330 gen_rtx_MULT (DFmode, d, x0))));
22331 /* y1 = x0 + e0 * x0 */
22332 emit_insn (gen_rtx_SET (VOIDmode, y1,
22333 gen_rtx_PLUS (DFmode,
22334 gen_rtx_MULT (DFmode, e0, x0), x0)));
22335 /* e1 = e0 * e0 */
22336 emit_insn (gen_rtx_SET (VOIDmode, e1,
22337 gen_rtx_MULT (DFmode, e0, e0)));
22338 /* y2 = y1 + e1 * y1 */
22339 emit_insn (gen_rtx_SET (VOIDmode, y2,
22340 gen_rtx_PLUS (DFmode,
22341 gen_rtx_MULT (DFmode, e1, y1), y1)));
22342 /* e2 = e1 * e1 */
22343 emit_insn (gen_rtx_SET (VOIDmode, e2,
22344 gen_rtx_MULT (DFmode, e1, e1)));
22345 /* y3 = y2 + e2 * y2 */
22346 emit_insn (gen_rtx_SET (VOIDmode, y3,
22347 gen_rtx_PLUS (DFmode,
22348 gen_rtx_MULT (DFmode, e2, y2), y2)));
22349 /* u0 = n * y3 */
22350 emit_insn (gen_rtx_SET (VOIDmode, u0,
22351 gen_rtx_MULT (DFmode, n, y3)));
22352 /* v0 = n - d * u0 */
22353 emit_insn (gen_rtx_SET (VOIDmode, v0,
22354 gen_rtx_MINUS (DFmode, n,
22355 gen_rtx_MULT (DFmode, d, u0))));
9c78b944
DE
22356 /* dst = u0 + v0 * y3 */
22357 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
22358 gen_rtx_PLUS (DFmode,
22359 gen_rtx_MULT (DFmode, v0, y3), u0)));
22360}
22361
565ef4ba 22362
9c78b944
DE
22363/* Newton-Raphson approximation of single-precision floating point rsqrt.
22364 Assumes no trapping math and finite arguments. */
22365
22366void
22367rs6000_emit_swrsqrtsf (rtx dst, rtx src)
22368{
22369 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
22370 half, one, halfthree, c1, cond, label;
22371
22372 x0 = gen_reg_rtx (SFmode);
22373 x1 = gen_reg_rtx (SFmode);
22374 x2 = gen_reg_rtx (SFmode);
22375 y1 = gen_reg_rtx (SFmode);
22376 u0 = gen_reg_rtx (SFmode);
22377 u1 = gen_reg_rtx (SFmode);
22378 u2 = gen_reg_rtx (SFmode);
22379 v0 = gen_reg_rtx (SFmode);
22380 v1 = gen_reg_rtx (SFmode);
22381 v2 = gen_reg_rtx (SFmode);
22382 t0 = gen_reg_rtx (SFmode);
22383 halfthree = gen_reg_rtx (SFmode);
22384 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
22385 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
22386
22387 /* check 0.0, 1.0, NaN, Inf by testing src * src = src */
22388 emit_insn (gen_rtx_SET (VOIDmode, t0,
22389 gen_rtx_MULT (SFmode, src, src)));
22390
22391 emit_insn (gen_rtx_SET (VOIDmode, cond,
22392 gen_rtx_COMPARE (CCFPmode, t0, src)));
22393 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
22394 emit_unlikely_jump (c1, label);
22395
22396 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
22397 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
22398
22399 /* halfthree = 1.5 = 1.0 + 0.5 */
22400 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
22401 gen_rtx_PLUS (SFmode, one, half)));
22402
22403 /* x0 = rsqrt estimate */
22404 emit_insn (gen_rtx_SET (VOIDmode, x0,
22405 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
22406 UNSPEC_RSQRT)));
22407
22408 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
22409 emit_insn (gen_rtx_SET (VOIDmode, y1,
22410 gen_rtx_MINUS (SFmode,
22411 gen_rtx_MULT (SFmode, src, halfthree),
22412 src)));
22413
22414 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
22415 emit_insn (gen_rtx_SET (VOIDmode, u0,
22416 gen_rtx_MULT (SFmode, x0, x0)));
22417 emit_insn (gen_rtx_SET (VOIDmode, v0,
22418 gen_rtx_MINUS (SFmode,
22419 halfthree,
22420 gen_rtx_MULT (SFmode, y1, u0))));
22421 emit_insn (gen_rtx_SET (VOIDmode, x1,
22422 gen_rtx_MULT (SFmode, x0, v0)));
22423
22424 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
22425 emit_insn (gen_rtx_SET (VOIDmode, u1,
22426 gen_rtx_MULT (SFmode, x1, x1)));
22427 emit_insn (gen_rtx_SET (VOIDmode, v1,
22428 gen_rtx_MINUS (SFmode,
22429 halfthree,
22430 gen_rtx_MULT (SFmode, y1, u1))));
22431 emit_insn (gen_rtx_SET (VOIDmode, x2,
22432 gen_rtx_MULT (SFmode, x1, v1)));
22433
22434 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
22435 emit_insn (gen_rtx_SET (VOIDmode, u2,
22436 gen_rtx_MULT (SFmode, x2, x2)));
22437 emit_insn (gen_rtx_SET (VOIDmode, v2,
22438 gen_rtx_MINUS (SFmode,
22439 halfthree,
22440 gen_rtx_MULT (SFmode, y1, u2))));
22441 emit_insn (gen_rtx_SET (VOIDmode, dst,
22442 gen_rtx_MULT (SFmode, x2, v2)));
22443
22444 emit_label (XEXP (label, 0));
22445}
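
/* Illustration only, not part of the compiler: a plain C model of the three
   refinement steps emitted above (the 0.0/1.0/NaN/Inf early exit is
   omitted), with 1/sqrtf standing in for the frsqrte estimate.  The helper
   name is invented for this sketch.  */

#include <math.h>

static float
rs6000_swrsqrtsf_model (float src)
{
  float halfthree = 1.5f;                 /* 1.0 + 0.5 */
  float x0 = 1.0f / sqrtf (src);          /* stand-in for the frsqrte estimate */
  float y1 = src * halfthree - src;       /* 0.5 * src, built without a 0.5 constant */
  float x1 = x0 * (halfthree - y1 * (x0 * x0));
  float x2 = x1 * (halfthree - y1 * (x1 * x1));
  return x2 * (halfthree - y1 * (x2 * x2));
}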
22446
565ef4ba
RS
22447/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
22448 target, and SRC is the argument operand. */
22449
22450void
22451rs6000_emit_popcount (rtx dst, rtx src)
22452{
22453 enum machine_mode mode = GET_MODE (dst);
22454 rtx tmp1, tmp2;
22455
22456 tmp1 = gen_reg_rtx (mode);
22457
22458 if (mode == SImode)
22459 {
22460 emit_insn (gen_popcntbsi2 (tmp1, src));
22461 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
22462 NULL_RTX, 0);
22463 tmp2 = force_reg (SImode, tmp2);
22464 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
22465 }
22466 else
22467 {
22468 emit_insn (gen_popcntbdi2 (tmp1, src));
22469 tmp2 = expand_mult (DImode, tmp1,
22470 GEN_INT ((HOST_WIDE_INT)
22471 0x01010101 << 32 | 0x01010101),
22472 NULL_RTX, 0);
22473 tmp2 = force_reg (DImode, tmp2);
22474 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
22475 }
22476}
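
/* Illustration only, not part of the compiler: a plain C model of the SImode
   path above.  The popcntb instruction, which leaves the population count of
   each byte in that byte, is modelled by the loop; the helper name is
   invented for this sketch.  */

static unsigned int
rs6000_popcountsi_model (unsigned int src)
{
  unsigned int bytes = 0;
  int i;

  /* Model of popcntb: count the set bits of each byte, left in place.  */
  for (i = 0; i < 4; i++)
    {
      unsigned int b = (src >> (8 * i)) & 0xff;
      unsigned int c = 0;
      while (b)
	{
	  c += b & 1;
	  b >>= 1;
	}
      bytes |= c << (8 * i);
    }

  /* Multiplying by 0x01010101 sums the four byte counts into the top byte;
     the shift by 24 then extracts the total.  */
  return (bytes * 0x01010101u) >> 24;
}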
22477
22478
22479/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
22480 target, and SRC is the argument operand. */
22481
22482void
22483rs6000_emit_parity (rtx dst, rtx src)
22484{
22485 enum machine_mode mode = GET_MODE (dst);
22486 rtx tmp;
22487
22488 tmp = gen_reg_rtx (mode);
22489 if (mode == SImode)
22490 {
22491 /* Is mult+shift >= shift+xor+shift+xor? */
22492 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
22493 {
22494 rtx tmp1, tmp2, tmp3, tmp4;
22495
22496 tmp1 = gen_reg_rtx (SImode);
22497 emit_insn (gen_popcntbsi2 (tmp1, src));
22498
22499 tmp2 = gen_reg_rtx (SImode);
22500 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
22501 tmp3 = gen_reg_rtx (SImode);
22502 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
22503
22504 tmp4 = gen_reg_rtx (SImode);
22505 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
22506 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
22507 }
22508 else
22509 rs6000_emit_popcount (tmp, src);
22510 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
22511 }
22512 else
22513 {
22514 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
22515 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
22516 {
22517 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
22518
22519 tmp1 = gen_reg_rtx (DImode);
22520 emit_insn (gen_popcntbdi2 (tmp1, src));
22521
22522 tmp2 = gen_reg_rtx (DImode);
22523 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
22524 tmp3 = gen_reg_rtx (DImode);
22525 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
22526
22527 tmp4 = gen_reg_rtx (DImode);
22528 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
22529 tmp5 = gen_reg_rtx (DImode);
22530 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
22531
22532 tmp6 = gen_reg_rtx (DImode);
22533 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
22534 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
22535 }
22536 else
22537 rs6000_emit_popcount (tmp, src);
22538 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
22539 }
22540}
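
/* Illustration only, not part of the compiler: a plain C model of the SImode
   shift/xor path chosen above when the multiply would be slower.  The helper
   name is invented for this sketch.  */

static unsigned int
rs6000_paritysi_model (unsigned int src)
{
  unsigned int bytes = 0, t;
  int i;

  /* Same model of popcntb as in the popcount sketch above.  */
  for (i = 0; i < 4; i++)
    {
      unsigned int b = (src >> (8 * i)) & 0xff;
      unsigned int c = 0;
      while (b)
	{
	  c += b & 1;
	  b >>= 1;
	}
      bytes |= c << (8 * i);
    }

  /* Each XOR fold keeps the low bit equal to the parity of the byte counts
     it absorbed, so after two folds the low bit of the result is the parity
     of the whole word.  */
  t = bytes ^ (bytes >> 16);
  t = t ^ (t >> 8);
  return t & 1;
}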
22541
ded9bf77
AH
22542/* Return an RTX representing where to find the function value of a
22543 function returning MODE. */
22544static rtx
22545rs6000_complex_function_value (enum machine_mode mode)
22546{
22547 unsigned int regno;
22548 rtx r1, r2;
22549 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 22550 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 22551
18f63bfa
AH
22552 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22553 regno = FP_ARG_RETURN;
354ed18f
AH
22554 else
22555 {
18f63bfa 22556 regno = GP_ARG_RETURN;
ded9bf77 22557
18f63bfa
AH
22558 /* 32-bit is OK since it'll go in r3/r4. */
22559 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
22560 return gen_rtx_REG (mode, regno);
22561 }
22562
18f63bfa
AH
22563 if (inner_bytes >= 8)
22564 return gen_rtx_REG (mode, regno);
22565
ded9bf77
AH
22566 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
22567 const0_rtx);
22568 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 22569 GEN_INT (inner_bytes));
ded9bf77
AH
22570 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
22571}
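/* For example, with hard float and FPRs enabled, a complex float result
   comes back as a two-element PARALLEL: the real part in FP_ARG_RETURN at
   offset 0 and the imaginary part in FP_ARG_RETURN + 1 at offset 4.  */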
22572
a6ebc39a
AH
22573/* Define how to find the value returned by a function.
22574 VALTYPE is the data type of the value (as a tree).
22575 If the precise function being called is known, FUNC is its FUNCTION_DECL;
22576 otherwise, FUNC is 0.
22577
22578 On the SPE, both FPs and vectors are returned in r3.
22579
22580 On RS/6000 an integer value is in r3 and a floating-point value is in
22581 fp1, unless -msoft-float. */
22582
22583rtx
586de218 22584rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
22585{
22586 enum machine_mode mode;
2a8fa26c 22587 unsigned int regno;
a6ebc39a 22588
594a51fe
SS
22589 /* Special handling for structs in darwin64. */
22590 if (rs6000_darwin64_abi
22591 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
22592 && TREE_CODE (valtype) == RECORD_TYPE
22593 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
22594 {
22595 CUMULATIVE_ARGS valcum;
22596 rtx valret;
22597
0b5383eb 22598 valcum.words = 0;
594a51fe
SS
22599 valcum.fregno = FP_ARG_MIN_REG;
22600 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
22601 /* Do a trial code generation as if this were going to be passed as
22602 an argument; if any part goes in memory, we return NULL. */
22603 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
22604 if (valret)
22605 return valret;
22606 /* Otherwise fall through to standard ABI rules. */
22607 }
22608
0e67400a
FJ
22609 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
22610 {
22611 /* A long long return value needs to be split in the 32-bit ABI with -mpowerpc64. */
22612 return gen_rtx_PARALLEL (DImode,
22613 gen_rtvec (2,
22614 gen_rtx_EXPR_LIST (VOIDmode,
22615 gen_rtx_REG (SImode, GP_ARG_RETURN),
22616 const0_rtx),
22617 gen_rtx_EXPR_LIST (VOIDmode,
22618 gen_rtx_REG (SImode,
22619 GP_ARG_RETURN + 1),
22620 GEN_INT (4))));
22621 }
0f086e42
FJ
22622 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
22623 {
22624 return gen_rtx_PARALLEL (DCmode,
22625 gen_rtvec (4,
22626 gen_rtx_EXPR_LIST (VOIDmode,
22627 gen_rtx_REG (SImode, GP_ARG_RETURN),
22628 const0_rtx),
22629 gen_rtx_EXPR_LIST (VOIDmode,
22630 gen_rtx_REG (SImode,
22631 GP_ARG_RETURN + 1),
22632 GEN_INT (4)),
22633 gen_rtx_EXPR_LIST (VOIDmode,
22634 gen_rtx_REG (SImode,
22635 GP_ARG_RETURN + 2),
22636 GEN_INT (8)),
22637 gen_rtx_EXPR_LIST (VOIDmode,
22638 gen_rtx_REG (SImode,
22639 GP_ARG_RETURN + 3),
22640 GEN_INT (12))));
22641 }
602ea4d3 22642
7348aa7f
FXC
22643 mode = TYPE_MODE (valtype);
22644 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 22645 || POINTER_TYPE_P (valtype))
b78d48dd 22646 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 22647
e41b2a33
PB
22648 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22649 /* _Decimal128 must use an even/odd register pair. */
22650 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
2c5cac98
ME
22651 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS
22652 && ((TARGET_SINGLE_FLOAT && (mode == SFmode)) || TARGET_DOUBLE_FLOAT))
2a8fa26c 22653 regno = FP_ARG_RETURN;
ded9bf77 22654 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 22655 && targetm.calls.split_complex_arg)
ded9bf77 22656 return rs6000_complex_function_value (mode);
44688022 22657 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 22658 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 22659 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 22660 regno = ALTIVEC_ARG_RETURN;
18f63bfa 22661 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4f011e1e
JM
22662 && (mode == DFmode || mode == DCmode
22663 || mode == TFmode || mode == TCmode))
18f63bfa 22664 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
22665 else
22666 regno = GP_ARG_RETURN;
22667
22668 return gen_rtx_REG (mode, regno);
22669}
22670
ded9bf77
AH
22671/* Define how to find the value returned by a library function
22672 assuming the value has mode MODE. */
22673rtx
22674rs6000_libcall_value (enum machine_mode mode)
22675{
22676 unsigned int regno;
22677
2e6c9641
FJ
22678 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
22679 {
22681 /* A long long return value needs to be split in the 32-bit ABI with -mpowerpc64. */
22681 return gen_rtx_PARALLEL (DImode,
22682 gen_rtvec (2,
22683 gen_rtx_EXPR_LIST (VOIDmode,
22684 gen_rtx_REG (SImode, GP_ARG_RETURN),
22685 const0_rtx),
22686 gen_rtx_EXPR_LIST (VOIDmode,
22687 gen_rtx_REG (SImode,
22688 GP_ARG_RETURN + 1),
22689 GEN_INT (4))));
22690 }
22691
e41b2a33
PB
22692 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22693 /* _Decimal128 must use an even/odd register pair. */
22694 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
00b79d54 22695 else if (SCALAR_FLOAT_MODE_P (mode)
d083fbba
ME
22696 && TARGET_HARD_FLOAT && TARGET_FPRS
22697 && ((TARGET_SINGLE_FLOAT && mode == SFmode) || TARGET_DOUBLE_FLOAT))
ded9bf77 22698 regno = FP_ARG_RETURN;
44688022
AM
22699 else if (ALTIVEC_VECTOR_MODE (mode)
22700 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 22701 regno = ALTIVEC_ARG_RETURN;
42ba5130 22702 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 22703 return rs6000_complex_function_value (mode);
18f63bfa 22704 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4f011e1e
JM
22705 && (mode == DFmode || mode == DCmode
22706 || mode == TFmode || mode == TCmode))
18f63bfa 22707 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
22708 else
22709 regno = GP_ARG_RETURN;
22710
22711 return gen_rtx_REG (mode, regno);
22712}
22713
d1d0c603
JJ
22714/* Define the offset between two registers, FROM to be eliminated and its
22715 replacement TO, at the start of a routine. */
22716HOST_WIDE_INT
22717rs6000_initial_elimination_offset (int from, int to)
22718{
22719 rs6000_stack_t *info = rs6000_stack_info ();
22720 HOST_WIDE_INT offset;
22721
7d5175e1 22722 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 22723 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
22724 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22725 {
22726 offset = info->push_p ? 0 : -info->total_size;
22727 if (FRAME_GROWS_DOWNWARD)
5b667039 22728 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
22729 }
22730 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
22731 offset = FRAME_GROWS_DOWNWARD
5b667039 22732 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
22733 : 0;
22734 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
22735 offset = info->total_size;
22736 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22737 offset = info->push_p ? info->total_size : 0;
22738 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
22739 offset = 0;
22740 else
37409796 22741 gcc_unreachable ();
d1d0c603
JJ
22742
22743 return offset;
22744}
22745
96714395 22746static rtx
a2369ed3 22747rs6000_dwarf_register_span (rtx reg)
96714395 22748{
6cd1d2e2
DJ
22749 rtx parts[8];
22750 int i, words;
22751 unsigned regno = REGNO (reg);
22752 enum machine_mode mode = GET_MODE (reg);
96714395 22753
4d4cbc0e 22754 if (TARGET_SPE
6cd1d2e2 22755 && regno < 32
4d4cbc0e 22756 && (SPE_VECTOR_MODE (GET_MODE (reg))
6cd1d2e2
DJ
22757 || (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode)
22758 && mode != SFmode && mode != SDmode && mode != SCmode)))
4d4cbc0e
AH
22759 ;
22760 else
96714395
AH
22761 return NULL_RTX;
22762
22763 regno = REGNO (reg);
22764
22765 /* The duality of the SPE register size wreaks all kinds of havoc.
22766 This is a way of distinguishing r0 in 32-bits from r0 in
22767 64-bits. */
6cd1d2e2
DJ
22768 words = (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
22769 gcc_assert (words <= 4);
22770 for (i = 0; i < words; i++, regno++)
22771 {
22772 if (BYTES_BIG_ENDIAN)
22773 {
22774 parts[2 * i] = gen_rtx_REG (SImode, regno + 1200);
22775 parts[2 * i + 1] = gen_rtx_REG (SImode, regno);
22776 }
22777 else
22778 {
22779 parts[2 * i] = gen_rtx_REG (SImode, regno);
22780 parts[2 * i + 1] = gen_rtx_REG (SImode, regno + 1200);
22781 }
22782 }
22783
22784 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (words * 2, parts));
96714395
AH
22785}
22786
37ea0b7e
JM
22787/* Fill in sizes for SPE register high parts in table used by unwinder. */
22788
22789static void
22790rs6000_init_dwarf_reg_sizes_extra (tree address)
22791{
22792 if (TARGET_SPE)
22793 {
22794 int i;
22795 enum machine_mode mode = TYPE_MODE (char_type_node);
bbbbb16a 22796 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, EXPAND_NORMAL);
37ea0b7e
JM
22797 rtx mem = gen_rtx_MEM (BLKmode, addr);
22798 rtx value = gen_int_mode (4, mode);
22799
22800 for (i = 1201; i < 1232; i++)
22801 {
22802 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
22803 HOST_WIDE_INT offset
22804 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
22805
22806 emit_move_insn (adjust_address (mem, mode, offset), value);
22807 }
22808 }
22809}
22810
93c9d1ba
AM
22811/* Map internal gcc register numbers to DWARF2 register numbers. */
22812
22813unsigned int
22814rs6000_dbx_register_number (unsigned int regno)
22815{
22816 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
22817 return regno;
22818 if (regno == MQ_REGNO)
22819 return 100;
1de43f85 22820 if (regno == LR_REGNO)
93c9d1ba 22821 return 108;
1de43f85 22822 if (regno == CTR_REGNO)
93c9d1ba
AM
22823 return 109;
22824 if (CR_REGNO_P (regno))
22825 return regno - CR0_REGNO + 86;
22826 if (regno == XER_REGNO)
22827 return 101;
22828 if (ALTIVEC_REGNO_P (regno))
22829 return regno - FIRST_ALTIVEC_REGNO + 1124;
22830 if (regno == VRSAVE_REGNO)
22831 return 356;
22832 if (regno == VSCR_REGNO)
22833 return 67;
22834 if (regno == SPE_ACC_REGNO)
22835 return 99;
22836 if (regno == SPEFSCR_REGNO)
22837 return 612;
22838 /* SPE high reg number. We get these values of regno from
22839 rs6000_dwarf_register_span. */
37409796
NS
22840 gcc_assert (regno >= 1200 && regno < 1232);
22841 return regno;
93c9d1ba
AM
22842}
22843
93f90be6 22844/* target hook eh_return_filter_mode */
f676971a 22845static enum machine_mode
93f90be6
FJ
22846rs6000_eh_return_filter_mode (void)
22847{
22848 return TARGET_32BIT ? SImode : word_mode;
22849}
22850
00b79d54
BE
22851/* Target hook for scalar_mode_supported_p. */
22852static bool
22853rs6000_scalar_mode_supported_p (enum machine_mode mode)
22854{
22855 if (DECIMAL_FLOAT_MODE_P (mode))
22856 return true;
22857 else
22858 return default_scalar_mode_supported_p (mode);
22859}
22860
f676971a
EC
22861/* Target hook for vector_mode_supported_p. */
22862static bool
22863rs6000_vector_mode_supported_p (enum machine_mode mode)
22864{
22865
96038623
DE
22866 if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
22867 return true;
22868
f676971a
EC
22869 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
22870 return true;
22871
22872 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
22873 return true;
22874
22875 else
22876 return false;
22877}
22878
bb8df8a6
EC
22879/* Target hook for invalid_arg_for_unprototyped_fn. */
22880static const char *
3101faab 22881invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
4d3e6fae
FJ
22882{
22883 return (!rs6000_darwin64_abi
22884 && typelist == 0
22885 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
22886 && (funcdecl == NULL_TREE
22887 || (TREE_CODE (funcdecl) == FUNCTION_DECL
22888 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
22889 ? N_("AltiVec argument passed to unprototyped function")
22890 : NULL;
22891}
22892
3aebbe5f
JJ
22893/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
22894 setup by using __stack_chk_fail_local hidden function instead of
22895 calling __stack_chk_fail directly. Otherwise it is better to call
22896 __stack_chk_fail directly. */
22897
22898static tree
22899rs6000_stack_protect_fail (void)
22900{
22901 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
22902 ? default_hidden_stack_protect_fail ()
22903 : default_external_stack_protect_fail ();
22904}
22905
c921bad8
AP
22906void
22907rs6000_final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
22908 int num_operands ATTRIBUTE_UNUSED)
22909{
22910 if (rs6000_warn_cell_microcode)
22911 {
22912 const char *temp;
22913 int insn_code_number = recog_memoized (insn);
22914 location_t location = locator_location (INSN_LOCATOR (insn));
22915
22916 /* Punt on insns we cannot recognize. */
22917 if (insn_code_number < 0)
22918 return;
22919
22920 temp = get_insn_template (insn_code_number, insn);
22921
22922 if (get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS)
22923 warning_at (location, OPT_mwarn_cell_microcode,
22924 "emitting microcode insn %s\t[%s] #%d",
22925 temp, insn_data[INSN_CODE (insn)].name, INSN_UID (insn));
22926 else if (get_attr_cell_micro (insn) == CELL_MICRO_CONDITIONAL)
22927 warning_at (location, OPT_mwarn_cell_microcode,
22928 "emitting conditional microcode insn %s\t[%s] #%d",
22929 temp, insn_data[INSN_CODE (insn)].name, INSN_UID (insn));
22930 }
22931}
22932
17211ab5 22933#include "gt-rs6000.h"